signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class SqlProcedureStatement { /** * Get the syntax that is required to invoke the procedure that is defined
* by the < code > ProcedureDescriptor < / code > that was passed to the
* constructor of this class .
* @ see SqlStatement # getStatement ( ) */
public String getStatement ( ) { } } | StringBuffer sb = new StringBuffer ( 512 ) ; int argumentCount = this . procedureDescriptor . getArgumentCount ( ) ; if ( this . procedureDescriptor . hasReturnValue ( ) ) { sb . append ( "{ ?= call " ) ; } else { sb . append ( "{ call " ) ; } sb . append ( this . procedureDescriptor . getName ( ) ) ; sb . append ( "(" ) ; for ( int i = 0 ; i < argumentCount ; i ++ ) { if ( i == 0 ) { sb . append ( "?" ) ; } else { sb . append ( ",?" ) ; } } sb . append ( ") }" ) ; return sb . toString ( ) ; |
public class BlobW { /** * Write blob data .
* @ param v byte to be written at current position . */
public void write ( int v ) throws IOException { } } | byte b [ ] = new byte [ 1 ] ; b [ 0 ] = ( byte ) v ; pos += blob . write ( b , 0 , pos , 1 ) ; |
public class LocationReferenceCheck { /** * Setter .
* @ param pVariableNames list of variable names to cover */
public void setVariableNames ( final String ... pVariableNames ) { } } | final Set < String > newVariableNames = new HashSet < > ( ) ; Collections . addAll ( newVariableNames , pVariableNames ) ; variableNames = Collections . unmodifiableSet ( newVariableNames ) ; |
public class RedisCounterFactory { /** * { @ inheritDoc } */
@ Override public void destroy ( ) { } } | try { super . destroy ( ) ; } catch ( Exception e ) { LOGGER . warn ( e . getMessage ( ) , e ) ; } if ( jedisPool != null && myOwnJedisPool ) { try { jedisPool . destroy ( ) ; } catch ( Exception e ) { LOGGER . warn ( e . getMessage ( ) , e ) ; } finally { jedisPool = null ; } } |
public class Invalidator { /** * Invalidates all keys from Near Caches of supplied data structure name .
* @ param dataStructureName name of the data structure to be cleared */
public final void invalidateAllKeys ( String dataStructureName , String sourceUuid ) { } } | checkNotNull ( sourceUuid , "sourceUuid cannot be null" ) ; int orderKey = getPartitionId ( dataStructureName ) ; Invalidation invalidation = newClearInvalidation ( dataStructureName , sourceUuid ) ; sendImmediately ( invalidation , orderKey ) ; |
public class Partial { /** * Gets a string version of the partial that lists all the fields .
* This method exists to provide a better debugging toString than
* the standard toString . This method lists all the fields and their
* values in a style similar to the collections framework .
* @ return a toString format that lists all the fields */
public String toStringList ( ) { } } | int size = size ( ) ; StringBuilder buf = new StringBuilder ( 20 * size ) ; buf . append ( '[' ) ; for ( int i = 0 ; i < size ; i ++ ) { if ( i > 0 ) { buf . append ( ',' ) . append ( ' ' ) ; } buf . append ( iTypes [ i ] . getName ( ) ) ; buf . append ( '=' ) ; buf . append ( iValues [ i ] ) ; } buf . append ( ']' ) ; return buf . toString ( ) ; |
public class ClassUtils { /** * Searches for a file system class by its name and attempts to load it .
* @ param className
* the name of the file system class
* @ return an instance of the file system class
* @ throws ClassNotFoundException
* thrown if no class with such a name can be found */
public static Class < ? extends FileSystem > getFileSystemByName ( final String className ) throws ClassNotFoundException { } } | return Class . forName ( className , true , getClassLoader ( ) ) . asSubclass ( FileSystem . class ) ; |
public class JxnetAutoConfiguration { /** * A handle link type .
* @ param context application context .
* @ return returns { @ link com . ardikars . jxpacket . common . layer . DataLinkLayer } . */
@ ConditionalOnClass ( { } } | Context . class , DataLinkType . class } ) // @ ConditionalOnBean ( Context . class )
@ Bean ( DATALINK_TYPE_BEAN_NAME ) public DataLinkType dataLinkType ( @ Qualifier ( CONTEXT_BEAN_NAME ) Context context ) { DataLinkType dataLinkType = context . pcapDataLink ( ) ; if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "Datalink type: {}." , dataLinkType ) ; } return dataLinkType ; |
public class ServerStateMachine { /** * Executes a state machine command . */
private void executeCommand ( long index , long sequence , long timestamp , ServerCommit commit , ServerSessionContext session , CompletableFuture < Result > future , ThreadContext context ) { } } | if ( ! log . isOpen ( ) ) { context . executor ( ) . execute ( ( ) -> future . completeExceptionally ( new IllegalStateException ( "log closed" ) ) ) ; return ; } // If the session is already in an inactive state , complete the future exceptionally .
if ( ! session . state ( ) . active ( ) ) { context . executor ( ) . execute ( ( ) -> future . completeExceptionally ( new UnknownSessionException ( "inactive session: " + session . id ( ) ) ) ) ; return ; } // Trigger scheduled callbacks in the state machine .
executor . tick ( index , timestamp ) ; // Update the state machine context with the commit index and local server context . The synchronous flag
// indicates whether the server expects linearizable completion of published events . Events will be published
// based on the configured consistency level for the context .
executor . init ( commit . index ( ) , commit . time ( ) , ServerStateMachineContext . Type . COMMAND ) ; // Store the event index to return in the command response .
long eventIndex = session . getEventIndex ( ) ; try { // Execute the state machine operation and get the result .
Object output = executor . executeOperation ( commit ) ; // Once the operation has been applied to the state machine , commit events published by the command .
// The state machine context will build a composite future for events published to all sessions .
executor . commit ( ) ; // Store the result for linearizability and complete the command .
Result result = new Result ( index , eventIndex , output ) ; session . registerResult ( sequence , result ) ; context . executor ( ) . execute ( ( ) -> future . complete ( result ) ) ; } catch ( Exception e ) { // If an exception occurs during execution of the command , store the exception .
Result result = new Result ( index , eventIndex , e ) ; session . registerResult ( sequence , result ) ; context . executor ( ) . execute ( ( ) -> future . complete ( result ) ) ; } |
public class DefaultGroovyMethods { /** * Create a suffix of the given Map by dropping as many entries as possible from the
* front of the original Map such that calling the given closure condition evaluates to
* true when passed each of the dropped entries ( or key / value pairs ) .
* < pre class = " groovyTestCase " >
* def shopping = [ milk : 1 , bread : 2 , chocolate : 3]
* assert shopping . dropWhile { it . key . size ( ) < 6 } = = [ chocolate : 3]
* assert shopping . dropWhile { it . value % 2 } = = [ bread : 2 , chocolate : 3]
* assert shopping . dropWhile { k , v - > k . size ( ) + v < = 7 } = = [ chocolate : 3]
* < / pre >
* If the map instance does not have ordered keys , then this function could appear to drop random
* entries . Groovy by default uses LinkedHashMap , so this shouldn ' t be an issue in the main .
* @ param self a Map
* @ param condition a 1 ( or 2 ) arg Closure that must evaluate to true for the
* entry ( or key and value ) to continue dropping elements
* @ return the shortest suffix of the given Map such that the given closure condition
* evaluates to true for each element dropped from the front of the Map
* @ since 1.8.7 */
public static < K , V > Map < K , V > dropWhile ( Map < K , V > self , @ ClosureParams ( MapEntryOrKeyValue . class ) Closure < ? > condition ) { } } | if ( self . isEmpty ( ) ) { return createSimilarMap ( self ) ; } Map < K , V > ret = createSimilarMap ( self ) ; boolean dropping = true ; BooleanClosureWrapper bcw = new BooleanClosureWrapper ( condition ) ; for ( Map . Entry < K , V > entry : self . entrySet ( ) ) { if ( dropping && ! bcw . callForMap ( entry ) ) dropping = false ; if ( ! dropping ) ret . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } return ret ; |
public class BaseTileBasedDataProcessor { /** * Prepare relations which aren ' t written as relation and only exist as geo - inheritance .
* Root and part elements will be assigned to a tile ( not to be confused with later tile processing ) .
* This facilitates to find matching parts and accelerates the process .
* Coastlines are handled too , for simplicity reasons .
* @ param tdWay the way , which should be prepared */
protected void prepareImplicitWayRelations ( TDWay tdWay ) { } } | if ( tdWay . isCoastline ( ) ) { // find matching tiles on zoom level 12
Set < TileCoordinate > coastLineTiles = GeoUtils . mapWayToTiles ( tdWay , TileInfo . TILE_INFO_ZOOMLEVEL , 0 ) ; for ( TileCoordinate tileCoordinate : coastLineTiles ) { TLongHashSet coastlines = this . tilesToCoastlines . get ( tileCoordinate ) ; if ( coastlines == null ) { coastlines = new TLongHashSet ( ) ; this . tilesToCoastlines . put ( tileCoordinate , coastlines ) ; } coastlines . add ( tdWay . getId ( ) ) ; } } else if ( this . tagValues ) { if ( tdWay . isRootElement ( ) ) { Set < TileCoordinate > rootTiles = GeoUtils . mapWayToTiles ( tdWay , TileInfo . TILE_INFO_ZOOMLEVEL , 0 ) ; for ( TileCoordinate tileCoordinate : rootTiles ) { TLongHashSet roots = this . tilesToRootElements . get ( tileCoordinate ) ; if ( roots == null ) { roots = new TLongHashSet ( ) ; this . tilesToRootElements . put ( tileCoordinate , roots ) ; } roots . add ( tdWay . getId ( ) ) ; } } else if ( tdWay . isPartElement ( ) ) { Set < TileCoordinate > partTiles = GeoUtils . mapWayToTiles ( tdWay , TileInfo . TILE_INFO_ZOOMLEVEL , 0 ) ; for ( TileCoordinate tileCoordinate : partTiles ) { TLongHashSet parts = this . tilesToPartElements . get ( tileCoordinate ) ; if ( parts == null ) { parts = new TLongHashSet ( ) ; this . tilesToPartElements . put ( tileCoordinate , parts ) ; } parts . add ( tdWay . getId ( ) ) ; } } } |
public class QWhereSection { /** * { @ inheritDoc } */
@ Override public QWhereSection prepare ( final AbstractObjectQuery < ? > _query ) throws EFapsException { } } | this . part . prepare ( _query , null ) ; return this ; |
public class ConvolveImage { /** * Performs a horizontal 1D convolution across the image .
* @ param input The original image . Not modified .
* @ param output Where the resulting image is written to . Modified .
* @ param kernel The kernel that is being convolved . Not modified .
* @ param border How the image borders are handled . */
public static void horizontal ( Kernel1D_S32 kernel , GrayS16 input , GrayI16 output , ImageBorder_S32 < GrayS16 > border ) { } } | InputSanityCheck . checkSameShape ( input , output ) ; boolean processed = BOverrideConvolveImage . invokeNativeHorizontal ( kernel , input , output , border ) ; if ( ! processed ) { border . setImage ( input ) ; ConvolveImageNoBorder . horizontal ( kernel , input , output ) ; ConvolveJustBorder_General_SB . horizontal ( kernel , border , output ) ; } |
public class LongToInteger { /** * { @ inheritDoc } */
@ Override protected Object primTransform ( Object anObject ) throws Exception { } } | return Integer . valueOf ( ( ( Long ) anObject ) . toString ( ) ) ; |
public class IntegerConstant { /** * Writes the contents of the pool entry . */
void write ( ByteCodeWriter out ) throws IOException { } } | out . write ( ConstantPool . CP_INTEGER ) ; out . writeInt ( _value ) ; |
public class MapRealMatrix { /** * { @ inheritDoc } */
@ Override public RealMatrix copy ( ) { } } | MapRealMatrix copy = new MapRealMatrix ( rowDimension , columnDimension ) ; copy . entries . putAll ( entries ) ; return copy ; |
public class HttpDispatcherFactory { /** * @ see com . ibm . wsspi . channelfw . ChannelFactory # updateProperties ( java . util . Map ) */
public synchronized void updateProperties ( Map < Object , Object > properties ) { } } | this . commonProperties = properties ; this . globalBufferSize = properties . get ( PROP_BUFFERSIZE ) ; |
public class RelationMention { /** * setter for arguments - sets
* @ generated
* @ param v value to set into the feature */
public void setArguments ( FSArray v ) { } } | if ( RelationMention_Type . featOkTst && ( ( RelationMention_Type ) jcasType ) . casFeat_arguments == null ) jcasType . jcas . throwFeatMissing ( "arguments" , "de.julielab.jules.types.RelationMention" ) ; jcasType . ll_cas . ll_setRefValue ( addr , ( ( RelationMention_Type ) jcasType ) . casFeatCode_arguments , jcasType . ll_cas . ll_getFSRef ( v ) ) ; |
public class DB { /** * Log query result .
* @ param callInfo Call info .
* @ param data Data set . */
void logQuery ( CallInfo callInfo , DataSet data ) { } } | if ( isEnabled ( Option . LOG_QUERIES ) ) { log . write ( callInfo , data ) ; } |
public class ComponentTagDeclarationLibrary { /** * ( non - Javadoc )
* @ see org . apache . myfaces . view . facelets . tag . TagLibrary # containsTagHandler ( java . lang . String , java . lang . String ) */
public boolean containsTagHandler ( String ns , String localName ) { } } | if ( containsNamespace ( ns ) ) { Map < String , TagHandlerFactory > map = _factories . get ( ns ) ; if ( map == null ) { return false ; } return map . containsKey ( localName ) ; } else { return false ; } |
public class BeanUtils { /** * An helper method to build and throw a SQL Exception when a property cannot be set .
* @ param cause the cause
* @ param theType the type of the property
* @ param value the value of the property
* @ throws SQLException the SQL Exception */
public static void throwSQLException ( Exception cause , String theType , String value ) throws SQLException { } } | throw new SQLException ( "Invalid " + theType + " value: " + value , cause ) ; |
public class XLog { /** * Initialize log system , should be called only once .
* @ param logLevel the log level , logs with a lower level than which would not be printed
* @ param logConfiguration the log configuration
* @ param printers the printers , each log would be printed by all of the printers
* @ deprecated the log level is part of log configuration now ,
* use { @ link # init ( LogConfiguration , Printer . . . ) } instead , since 1.3.0 */
@ Deprecated public static void init ( int logLevel , LogConfiguration logConfiguration , Printer ... printers ) { } } | init ( new LogConfiguration . Builder ( logConfiguration ) . logLevel ( logLevel ) . build ( ) , printers ) ; |
public class EnumValue { /** * < p > Normalizes a string to be uppercase and use and underscore in place of whitespace . < / p >
* @ param name the name to be normalized
* @ return the normalized version of the name
* @ throws NullPointerException if the name is null */
static String normalize ( @ NonNull String name ) { } } | StringBuilder toReturn = new StringBuilder ( ) ; boolean previousSpace = false ; for ( char c : name . toCharArray ( ) ) { if ( Character . isWhitespace ( c ) ) { if ( ! previousSpace ) { toReturn . append ( '_' ) ; } previousSpace = true ; } else { previousSpace = false ; toReturn . append ( Character . toUpperCase ( c ) ) ; } } return toReturn . toString ( ) ; // name . toUpperCase ( ) . replaceAll ( " \ \ s + " , " _ " ) ; |
public class AbstractGenericRowMapper { /** * Generate SELECT statement to select a BO .
* The generated SQL will look like this
* { @ code SELECT all - columns FROM table WHERE pk - 1 = ? AND pk - 2 = ? . . . }
* @ param tableName
* @ return
* @ since 0.8.5 */
public String generateSqlSelect ( String tableName ) { } } | try { return cacheSQLs . get ( "SELECT:" + tableName , ( ) -> { return MessageFormat . format ( "SELECT {2} FROM {0} WHERE {1}" , tableName , strWherePkClause , strAllColumns ) ; } ) ; } catch ( ExecutionException e ) { throw new DaoException ( e ) ; } |
public class dnssuffix { /** * Use this API to fetch dnssuffix resources of given names . */
public static dnssuffix [ ] get ( nitro_service service , String Dnssuffix [ ] ) throws Exception { } } | if ( Dnssuffix != null && Dnssuffix . length > 0 ) { dnssuffix response [ ] = new dnssuffix [ Dnssuffix . length ] ; dnssuffix obj [ ] = new dnssuffix [ Dnssuffix . length ] ; for ( int i = 0 ; i < Dnssuffix . length ; i ++ ) { obj [ i ] = new dnssuffix ( ) ; obj [ i ] . set_Dnssuffix ( Dnssuffix [ i ] ) ; response [ i ] = ( dnssuffix ) obj [ i ] . get_resource ( service ) ; } return response ; } return null ; |
public class Op { /** * Creates an < i > operation expression < / i > on the specified target object .
* @ param target the target object on which the expression will execute
* @ return an operator , ready for chaining */
public static < T > Level0GenericUniqOperator < T , T > on ( final T target ) { } } | return new Level0GenericUniqOperator < T , T > ( ExecutionTarget . forOp ( target , Normalisation . NONE ) ) ; |
public class PropertyValues { /** * Sets a dynamic property value on an object . */
void setObjectDynamicProperty ( Object object , String propertyName , Object value ) { } } | DynamicPropertyBag bag = propertyBagAccess . getObjectDynamicPropertyBag ( object ) ; if ( bag == null ) { bag = new DynamicPropertyBag ( ) ; propertyBagAccess . setObjectDynamicPropertyBag ( object , bag ) ; } bag . set ( propertyName , value ) ; Properties . notify ( object , propertyName ) ; |
public class lbsipparameters { /** * Use this API to unset the properties of lbsipparameters resource .
* Properties that need to be unset are specified in args array . */
public static base_response unset ( nitro_service client , lbsipparameters resource , String [ ] args ) throws Exception { } } | lbsipparameters unsetresource = new lbsipparameters ( ) ; return unsetresource . unset_resource ( client , args ) ; |
public class PersistentExecutorImpl { /** * Returns status for the persistent task with the specified id .
* @ param taskId unique identifier for the task .
* @ return status for the persistent task with the specified id .
* If the task is not found , < code > null < / code > is returned . */
@ Override public < T > TaskStatus < T > getStatus ( long taskId ) { } } | String owner = getOwner ( ) ; if ( owner == null ) return null ; TransactionController tranController = new TransactionController ( ) ; TaskRecord taskRecord = null ; try { tranController . preInvoke ( ) ; taskRecord = taskStore . findById ( taskId , owner , false ) ; } catch ( Throwable x ) { tranController . setFailure ( x ) ; } finally { PersistentStoreException x = tranController . postInvoke ( PersistentStoreException . class ) ; // TODO proposed spec class
if ( x != null ) throw x ; } return taskRecord == null ? null : new TaskStatusImpl < T > ( taskRecord , this ) ; |
public class CmsImportVersion2 { /** * Merges a single page . < p >
* @ param resourcename the resource name of the page
* @ throws CmsImportExportException if something goes wrong
* @ throws CmsXmlException if the page file could not be unmarshalled */
private void mergePageFile ( String resourcename ) throws CmsXmlException , CmsImportExportException { } } | try { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_IMPORTEXPORT_START_MERGING_1 , resourcename ) ) ; } // in OpenCms versions < 5 node names have not been case sensitive . thus , nodes are read both in upper
// and lower case letters , or have to be tested for equality ignoring upper / lower case . . .
// get the header file
CmsFile pagefile = m_cms . readFile ( resourcename , CmsResourceFilter . ALL ) ; Document contentXml = CmsXmlUtils . unmarshalHelper ( pagefile . getContents ( ) , null ) ; // get the < masterTemplate > node to check the content . this node contains the name of the template file .
String masterTemplateNodeName = "//masterTemplate" ; Node masterTemplateNode = contentXml . selectSingleNode ( masterTemplateNodeName ) ; if ( masterTemplateNode == null ) { masterTemplateNode = contentXml . selectSingleNode ( masterTemplateNodeName . toLowerCase ( ) ) ; } if ( masterTemplateNode == null ) { masterTemplateNode = contentXml . selectSingleNode ( masterTemplateNodeName . toUpperCase ( ) ) ; } // there is only one < masterTemplate > allowed
String mastertemplate = null ; if ( masterTemplateNode != null ) { // get the name of the mastertemplate
mastertemplate = masterTemplateNode . getText ( ) . trim ( ) ; } // get the < ELEMENTDEF > nodes to check the content .
// this node contains the information for the body element .
String elementDefNodeName = "//ELEMENTDEF" ; Node bodyNode = contentXml . selectSingleNode ( elementDefNodeName ) ; if ( bodyNode == null ) { bodyNode = contentXml . selectSingleNode ( elementDefNodeName . toLowerCase ( ) ) ; } // there is only one < ELEMENTDEF > allowed
if ( bodyNode != null ) { String bodyclass = null ; String bodyname = null ; Map < String , String > bodyparams = null ; List < Element > nodes = ( ( Element ) bodyNode ) . elements ( ) ; for ( int i = 0 , n = nodes . size ( ) ; i < n ; i ++ ) { Node node = nodes . get ( i ) ; if ( "CLASS" . equalsIgnoreCase ( node . getName ( ) ) ) { bodyclass = node . getText ( ) . trim ( ) ; } else if ( "TEMPLATE" . equalsIgnoreCase ( node . getName ( ) ) ) { bodyname = node . getText ( ) . trim ( ) ; if ( ! bodyname . startsWith ( "/" ) ) { bodyname = CmsResource . getFolderPath ( resourcename ) + bodyname ; } } else if ( "PARAMETER" . equalsIgnoreCase ( node . getName ( ) ) ) { Element paramElement = ( Element ) node ; if ( bodyparams == null ) { bodyparams = new HashMap < String , String > ( ) ; } bodyparams . put ( ( paramElement . attribute ( "name" ) ) . getText ( ) , paramElement . getTextTrim ( ) ) ; } } if ( ( mastertemplate == null ) || ( bodyname == null ) ) { CmsMessageContainer message = Messages . get ( ) . container ( Messages . ERR_IMPORTEXPORT_ERROR_CANNOT_MERGE_PAGE_FILE_3 , resourcename , mastertemplate , bodyname ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( message . key ( ) ) ; } throw new CmsImportExportException ( message ) ; } // lock the resource , so that it can be manipulated
m_cms . lockResource ( resourcename ) ; // get all properties
List < CmsProperty > properties = m_cms . readPropertyObjects ( resourcename , false ) ; // now get the content of the bodyfile and insert it into the control file
CmsFile bodyfile = m_cms . readFile ( bodyname , CmsResourceFilter . IGNORE_EXPIRATION ) ; // get the encoding
String encoding = CmsProperty . get ( CmsPropertyDefinition . PROPERTY_CONTENT_ENCODING , properties ) . getValue ( ) ; if ( encoding == null ) { encoding = OpenCms . getSystemInfo ( ) . getDefaultEncoding ( ) ; } if ( m_convertToXmlPage ) { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_IMPORTEXPORT_START_CONVERTING_TO_XML_0 ) ) ; } CmsXmlPage xmlPage = CmsXmlPageConverter . convertToXmlPage ( m_cms , bodyfile . getContents ( ) , getLocale ( resourcename , properties ) , encoding ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_IMPORTEXPORT_END_CONVERTING_TO_XML_0 ) ) ; } if ( xmlPage != null ) { pagefile . setContents ( xmlPage . marshal ( ) ) ; // set the type to xml page
pagefile . setType ( CmsResourceTypeXmlPage . getStaticTypeId ( ) ) ; } } // add the template and other required properties
CmsProperty newProperty = new CmsProperty ( CmsPropertyDefinition . PROPERTY_TEMPLATE , mastertemplate , null ) ; // property lists must not contain equal properties
properties . remove ( newProperty ) ; properties . add ( newProperty ) ; // if set , add the bodyclass as property
if ( CmsStringUtil . isNotEmpty ( bodyclass ) ) { newProperty = new CmsProperty ( CmsPropertyDefinition . PROPERTY_TEMPLATE , mastertemplate , null ) ; newProperty . setAutoCreatePropertyDefinition ( true ) ; properties . remove ( newProperty ) ; properties . add ( newProperty ) ; } // if set , add bodyparams as properties
if ( bodyparams != null ) { for ( Iterator < Entry < String , String > > p = bodyparams . entrySet ( ) . iterator ( ) ; p . hasNext ( ) ; ) { Entry < String , String > entry = p . next ( ) ; String key = entry . getKey ( ) ; String value = entry . getValue ( ) ; newProperty = new CmsProperty ( key , value , null ) ; newProperty . setAutoCreatePropertyDefinition ( true ) ; properties . remove ( newProperty ) ; properties . add ( newProperty ) ; } } if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_IMPORTEXPORT_START_IMPORTING_XML_PAGE_0 ) ) ; } // now import the resource
m_cms . importResource ( resourcename , pagefile , pagefile . getContents ( ) , properties ) ; // finally delete the old body file , it is not needed anymore
m_cms . lockResource ( bodyname ) ; m_cms . deleteResource ( bodyname , CmsResource . DELETE_PRESERVE_SIBLINGS ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_IMPORTEXPORT_END_IMPORTING_XML_PAGE_0 ) ) ; } m_report . println ( org . opencms . report . Messages . get ( ) . container ( org . opencms . report . Messages . RPT_OK_0 ) , I_CmsReport . FORMAT_OK ) ; } else { // there are more than one template nodes in this control file
// convert the resource into a plain text file
// lock the resource , so that it can be manipulated
m_cms . lockResource ( resourcename ) ; // set the type to plain
pagefile . setType ( CmsResourceTypePlain . getStaticTypeId ( ) ) ; // write all changes
m_cms . writeFile ( pagefile ) ; // done , unlock the resource
m_cms . unlockResource ( resourcename ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_IMPORTEXPORT_CANNOT_CONVERT_XML_STRUCTURE_1 , resourcename ) ) ; } m_report . println ( Messages . get ( ) . container ( Messages . RPT_NOT_CONVERTED_0 ) , I_CmsReport . FORMAT_OK ) ; } if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_IMPORTEXPORT_END_MERGING_1 , resourcename ) ) ; } } catch ( CmsXmlException e ) { throw e ; } catch ( CmsException e ) { m_report . println ( e ) ; CmsMessageContainer message = Messages . get ( ) . container ( Messages . ERR_IMPORTEXPORT_ERROR_MERGING_PAGE_FILE_1 , resourcename ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( message . key ( ) , e ) ; } throw new CmsImportExportException ( message , e ) ; } |
public class AWSFMSClient { /** * Returns an array of < code > PolicyComplianceStatus < / code > objects in the response . Use
* < code > PolicyComplianceStatus < / code > to get a summary of which member accounts are protected by the specified
* policy .
* @ param listComplianceStatusRequest
* @ return Result of the ListComplianceStatus operation returned by the service .
* @ throws ResourceNotFoundException
* The specified resource was not found .
* @ throws InternalErrorException
* The operation failed because of a system problem , even though the request was valid . Retry your request .
* @ sample AWSFMS . ListComplianceStatus
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / fms - 2018-01-01 / ListComplianceStatus " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public ListComplianceStatusResult listComplianceStatus ( ListComplianceStatusRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeListComplianceStatus ( request ) ; |
public class TaskManagerService { /** * Return true if we are currently executing the given task . */
private boolean isOurActiveTask ( Tenant tenant , String taskID ) { } } | synchronized ( m_activeTasks ) { return m_activeTasks . containsKey ( createMapKey ( tenant , taskID ) ) ; } |
public class authenticationlocalpolicy_authenticationvserver_binding { /** * Use this API to fetch authenticationlocalpolicy _ authenticationvserver _ binding resources of given name . */
public static authenticationlocalpolicy_authenticationvserver_binding [ ] get ( nitro_service service , String name ) throws Exception { } } | authenticationlocalpolicy_authenticationvserver_binding obj = new authenticationlocalpolicy_authenticationvserver_binding ( ) ; obj . set_name ( name ) ; authenticationlocalpolicy_authenticationvserver_binding response [ ] = ( authenticationlocalpolicy_authenticationvserver_binding [ ] ) obj . get_resources ( service ) ; return response ; |
public class Jdk14Logger { /** * Log a message and exception with error log level . */
public void error ( Object message , Throwable exception ) { } } | log ( Level . SEVERE , String . valueOf ( message ) , exception ) ; |
public class IntegrationAccountBatchConfigurationsInner { /** * List the batch configurations for an integration account .
* @ param resourceGroupName The resource group name .
* @ param integrationAccountName The integration account name .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < List < BatchConfigurationInner > > listAsync ( String resourceGroupName , String integrationAccountName , final ServiceCallback < List < BatchConfigurationInner > > serviceCallback ) { } } | return ServiceFuture . fromResponse ( listWithServiceResponseAsync ( resourceGroupName , integrationAccountName ) , serviceCallback ) ; |
public class ConstantPool { /** * Adds a name - and - type constant . */
public NameAndTypeConstant addNameAndType ( String name , String type ) { } } | NameAndTypeConstant entry = getNameAndType ( name , type ) ; if ( entry != null ) return entry ; Utf8Constant nameEntry = addUTF8 ( name ) ; Utf8Constant typeEntry = addUTF8 ( type ) ; entry = new NameAndTypeConstant ( this , _entries . size ( ) , nameEntry . getIndex ( ) , typeEntry . getIndex ( ) ) ; addConstant ( entry ) ; return entry ; |
public class DataIO { /** * Writes UTF - 8 encoded characters to the given stream , but does not write
* the length . */
public static final void writeUTF ( OutputStream out , char [ ] chars , int offset , int length ) throws IOException { } } | for ( int i = 0 ; i < length ; i ++ ) { int c = chars [ i + offset ] ; if ( ( c >= 0x0001 ) && ( c <= 0x007F ) ) { out . write ( c ) ; } else if ( c > 0x07FF ) { out . write ( 0xe0 | ( ( c >> 12 ) & 0x0f ) ) ; out . write ( 0x80 | ( ( c >> 6 ) & 0x3f ) ) ; out . write ( 0x80 | ( c & 0x3f ) ) ; } else { out . write ( 0xc0 | ( ( c >> 6 ) & 0x1f ) ) ; out . write ( 0x80 | ( c & 0x3f ) ) ; } } |
public class BaseConnection { /** * Closes connection */
public void close ( ) { } } | if ( closed ) { log . debug ( "Already closed, nothing to do" ) ; return ; } closed = true ; if ( scope != null ) { log . debug ( "Close, disconnect from scope, and children" ) ; try { // unregister all child scopes first
for ( IBasicScope basicScope : basicScopes ) { unregisterBasicScope ( basicScope ) ; } } catch ( Exception err ) { log . error ( "Error while unregistering basic scopes" , err ) ; } // disconnect
if ( scope != null ) { try { scope . disconnect ( this ) ; } catch ( Exception err ) { log . error ( "Error while disconnecting from scope: {}. {}" , scope , err ) ; } scope = null ; } } // unregister client
if ( client != null && client instanceof Client ) { ( ( Client ) client ) . unregister ( this ) ; } // alert our listeners
if ( connectionListeners != null ) { for ( IConnectionListener listener : connectionListeners ) { listener . notifyDisconnected ( this ) ; } connectionListeners . clear ( ) ; connectionListeners = null ; } |
public class CmsStringUtil { /** * Returns a string representation for the given collection using the given separator . < p >
* @ param collection the collection to print
* @ param separator the item separator
* @ return the string representation for the given collection */
public static String collectionAsString ( Collection < ? > collection , String separator ) { } } | StringBuffer string = new StringBuffer ( 128 ) ; Iterator < ? > it = collection . iterator ( ) ; while ( it . hasNext ( ) ) { string . append ( it . next ( ) ) ; if ( it . hasNext ( ) ) { string . append ( separator ) ; } } return string . toString ( ) ; |
public class SslUtils { /** * Add elements from { @ code names } into { @ code enabled } if they are in { @ code supported } . */
static void addIfSupported ( Set < String > supported , List < String > enabled , String ... names ) { } } | for ( String n : names ) { if ( supported . contains ( n ) ) { enabled . add ( n ) ; } } |
public class SyntaxReader { /** * Reads a comment and returns its body . */
private String readComment ( ) { } } | if ( pos == data . length || data [ pos ] != '/' ) throw new AssertionError ( ) ; pos ++ ; int commentType = pos < data . length ? data [ pos ++ ] : - 1 ; if ( commentType == '*' ) { StringBuilder result = new StringBuilder ( ) ; boolean startOfLine = true ; for ( ; pos + 1 < data . length ; pos ++ ) { char c = data [ pos ] ; if ( c == '*' && data [ pos + 1 ] == '/' ) { pos += 2 ; return result . toString ( ) . trim ( ) ; } if ( c == '\n' ) { result . append ( '\n' ) ; newline ( ) ; startOfLine = true ; } else if ( ! startOfLine ) { result . append ( c ) ; } else if ( c == '*' ) { if ( data [ pos + 1 ] == ' ' ) { pos += 1 ; // Skip a single leading space , if present .
} startOfLine = false ; } else if ( ! Character . isWhitespace ( c ) ) { result . append ( c ) ; startOfLine = false ; } } throw unexpected ( "unterminated comment" ) ; } else if ( commentType == '/' ) { if ( pos < data . length && data [ pos ] == ' ' ) { pos += 1 ; // Skip a single leading space , if present .
} int start = pos ; while ( pos < data . length ) { char c = data [ pos ++ ] ; if ( c == '\n' ) { newline ( ) ; break ; } } return new String ( data , start , pos - 1 - start ) ; } else { throw unexpected ( "unexpected '/'" ) ; } |
public class TransactionOptions { /** * Create a default set of options with a custom executor .
* @ param executor The executor to run the user callback code on .
* @ return The TransactionOptions object . */
@ Nonnull public static TransactionOptions create ( @ Nonnull Executor executor ) { } } | return new TransactionOptions ( DEFAULT_NUM_ATTEMPTS , executor , null ) ; |
public class StaticRunnerSupport { /** * Creates a pending test suite .
* < p > Test suites can contain :
* < ul >
* < li > specs ( defined by using { @ code it ( ) } < / li >
* < li > other suites ( defined by nesting { @ code describe ( ) } calls ) < / li >
* < li > { @ code beforeEach ( ) } and { @ code afterEach ( ) } handlers . < / li >
* < / ul >
* < p > For example :
* < pre > { @ code
* xdescribe ( " my test suite " , ( ) - > {
* / / This will not be executed
* } < / pre >
* @ param text A description of the test suite
* @ param block A code block that represents the test suite */
public static void xdescribe ( String text , PendingInvokable block ) { } } | failIfNoSuiteBuilderAvailable ( "xdescribe" ) ; suiteBuilder . xdescribe ( text , block ) ; |
public class Database { public void putDevicePipeProperty ( String deviceName , ArrayList < DbPipe > dbPipes ) throws DevFailed { } } | for ( DbPipe dbPipe : dbPipes ) databaseDAO . putDevicePipeProperty ( this , deviceName , dbPipe ) ; |
public class TextMessageBodyWriter { /** * { @ inheritDoc } */
@ Override public void writeTo ( final Object entity , final Class < ? > type , final Type genericType , final Annotation [ ] annotations , final MediaType mediaType , final MultivaluedMap < String , Object > httpHeaders , final OutputStream entityStream ) throws IOException , WebApplicationException { } } | if ( entity != null ) { Writer osw = new OutputStreamWriter ( entityStream , getCharset ( mediaType ) ) ; String s = entity . toString ( ) ; osw . write ( s , 0 , s . length ( ) ) ; osw . flush ( ) ; } |
public class RedisNumber { /** * 指定key的值加操作
* @ param integer
* @ return */
public long increase ( long integer ) { } } | try { return getJedisCommands ( groupName ) . incrBy ( key , integer ) ; } finally { getJedisProvider ( groupName ) . release ( ) ; } |
public class XpidlCompiler { /** * Compiles an . idl file into the corresponding . h and . xpt files .
* @ param task
* current cc task
* @ param outputDir
* output directory
* @ param sourceFiles
* source files
* @ param args
* command line arguments that appear before input files
* @ param endArgs
* command line arguments that appear after input files
* @ param relentless
* if true , do not stop at first compilation error
* @ param config
* compiler configuration
* @ param monitor
* progress monitor */
@ Override public void compile ( final CCTask task , final File outputDir , final String [ ] sourceFiles , final String [ ] args , final String [ ] endArgs , final boolean relentless , final CommandLineCompilerConfiguration config , final ProgressMonitor monitor ) { } } | BuildException exc = null ; final String [ ] thisSource = new String [ 1 ] ; final String [ ] tlbCommand = new String [ args . length + endArgs . length + 6 ] ; tlbCommand [ 0 ] = "xpidl" ; tlbCommand [ 1 ] = "-m" ; tlbCommand [ 2 ] = "typelib" ; final String [ ] headerCommand = new String [ args . length + endArgs . length + 6 ] ; headerCommand [ 0 ] = "xpidl" ; headerCommand [ 1 ] = "-m" ; headerCommand [ 2 ] = "header" ; for ( int i = 0 ; i < args . length ; i ++ ) { tlbCommand [ i + 3 ] = args [ i ] ; headerCommand [ i + 3 ] = args [ i ] ; } tlbCommand [ args . length + 3 ] = "-e" ; headerCommand [ args . length + 3 ] = "-e" ; int tlbIndex = args . length + 6 ; int headerIndex = args . length + 6 ; for ( final String endArg : endArgs ) { tlbCommand [ tlbIndex ++ ] = endArg ; headerCommand [ headerIndex ++ ] = endArg ; } for ( final String sourceFile : sourceFiles ) { tlbIndex = args . length + 4 ; headerIndex = args . length + 4 ; final String [ ] outputFileNames = getOutputFileNames ( sourceFile , null ) ; tlbCommand [ tlbIndex ++ ] = outputFileNames [ 0 ] ; tlbCommand [ tlbIndex ++ ] = sourceFile ; headerCommand [ headerIndex ++ ] = outputFileNames [ 1 ] ; headerCommand [ headerIndex ++ ] = sourceFile ; int retval = runCommand ( task , outputDir , tlbCommand ) ; if ( retval == 0 ) { retval = runCommand ( task , outputDir , headerCommand ) ; } if ( monitor != null ) { thisSource [ 0 ] = sourceFile ; monitor . progress ( thisSource ) ; } // if the process returned a failure code and
// we aren ' t holding an exception from an earlier
// interation
if ( retval != 0 && exc == null ) { // construct the exception
exc = new BuildException ( this . getCommand ( ) + " failed with return code " + retval , task . getLocation ( ) ) ; // and throw it now unless we are relentless
if ( ! relentless ) { throw exc ; } } } // if the compiler returned a failure value earlier
// then throw an exception
if ( exc != null ) { throw exc ; } |
public class ZipEntryUtil { /** * Copies a given ZIP entry to a ZIP file . If this . preserveTimestamps is true , original timestamp
* is carried over , otherwise uses current time .
* @ param originalEntry
* a ZIP entry from existing ZIP file .
* @ param in
* contents of the ZIP entry .
* @ param out
* target ZIP stream . */
static void copyEntry ( ZipEntry originalEntry , InputStream in , ZipOutputStream out , boolean preserveTimestamps ) throws IOException { } } | ZipEntry copy = copy ( originalEntry ) ; if ( preserveTimestamps ) { TimestampStrategyFactory . getInstance ( ) . setTime ( copy , originalEntry ) ; } else { copy . setTime ( System . currentTimeMillis ( ) ) ; } addEntry ( copy , new BufferedInputStream ( in ) , out ) ; |
public class AbstractExtensionProvider { /** * Register custom property for specified node type . */
public final < T > void registerProperty ( Class < T > object , String property , Function < T , Object > function ) { } } | PropertyProvider extender = extenderMap . computeIfAbsent ( object , aClass -> new PropertyProviderImpl ( ) ) ; extender . register ( property , function ) ; |
public class ServerImpl { /** * Check if the client is in a valid state .
* @ param client The client to test .
* @ param from The client id .
* @ param expected The expected client state .
* @ return < code > true < / code > if valid , < code > false < / code > else . */
private static boolean checkValidity ( ClientSocket client , byte from , StateConnection expected ) { } } | return from >= 0 && client . getState ( ) == expected ; |
public class SpreadsheetCompiler { /** * Generates DRL from the input stream containing the spreadsheet .
* @ param showPackage
* tells it to print or not print any package statements in the spreadsheet .
* @ param xlsStream
* The stream to the spreadsheet . Uses the first worksheet found
* for the decision tables , ignores others .
* @ return DRL xml , ready for use in drools . */
public String compile ( boolean showPackage , final InputStream xlsStream , final InputType type ) { } } | return compile ( xlsStream , type , new DefaultRuleSheetListener ( showPackage , trimCell ) ) ; |
public class DeferredFileOutputStream { /** * Switches the underlying output stream from a memory based stream to one
* that is backed by disk . This is the point at which we realise that too much
* data is being written to keep in memory , so we elect to switch to
* disk - based storage .
* @ exception IOException
* if an error occurs . */
@ Override protected void onThresholdReached ( ) throws IOException { } } | FileOutputStream aFOS = null ; try { aFOS = new FileOutputStream ( m_aOutputFile ) ; m_aMemoryOS . writeTo ( aFOS ) ; m_aCurrentOS = aFOS ; // Explicitly close the stream ( even though this is a no - op )
StreamHelper . close ( m_aMemoryOS ) ; m_aMemoryOS = null ; } catch ( final IOException ex ) { StreamHelper . close ( aFOS ) ; throw ex ; } |
public class BaseMessageRecordDesc { /** * Convenience method - Put the value for this param in the map .
* If it is not the correct object type , convert it first . */
public void put ( String strKey , Object objValue ) { } } | if ( this . getMessageFieldDesc ( strKey ) != null ) this . getMessageFieldDesc ( strKey ) . put ( objValue ) ; else if ( this . getMessage ( ) != null ) this . getMessage ( ) . putNative ( this . getFullKey ( strKey ) , objValue ) ; |
public class ProviderConfig { /** * Gets proxy class .
* @ return the proxyClass */
@ Override public Class < ? > getProxyClass ( ) { } } | if ( proxyClass != null ) { return proxyClass ; } try { if ( StringUtils . isNotBlank ( interfaceId ) ) { this . proxyClass = ClassUtils . forName ( interfaceId ) ; if ( ! proxyClass . isInterface ( ) ) { throw ExceptionUtils . buildRuntime ( "service.interfaceId" , interfaceId , "interfaceId must set interface class, not implement class" ) ; } } else { throw ExceptionUtils . buildRuntime ( "service.interfaceId" , "null" , "interfaceId must be not null" ) ; } } catch ( SofaRpcRuntimeException e ) { throw e ; } catch ( Throwable e ) { throw new SofaRpcRuntimeException ( e . getMessage ( ) , e ) ; } return proxyClass ; |
public class CommerceDiscountRelLocalServiceUtil { /** * Updates the commerce discount rel in the database or adds it if it does not yet exist . Also notifies the appropriate model listeners .
* @ param commerceDiscountRel the commerce discount rel
* @ return the commerce discount rel that was updated */
public static com . liferay . commerce . discount . model . CommerceDiscountRel updateCommerceDiscountRel ( com . liferay . commerce . discount . model . CommerceDiscountRel commerceDiscountRel ) { } } | return getService ( ) . updateCommerceDiscountRel ( commerceDiscountRel ) ; |
public class Functions { /** * Fluent limit operation using primitive types
* e . g .
* < pre >
* { @ code
* import static cyclops . ReactiveSeq . limitDoubles ;
* ReactiveSeq . ofDoubles ( 1d , 2d , 3d )
* . to ( limitDoubles ( 1 ) ) ;
* < / pre > */
public static Function < ? super ReactiveSeq < Double > , ? extends ReactiveSeq < Double > > limitDouble ( long maxSize ) { } } | return a -> a . doubles ( i -> i , s -> s . limit ( maxSize ) ) ; |
public class UIManager { /** * Retrieves a renderer which can renderer templates for the given context .
* @ param context the render context .
* @ return an appropriate renderer for the component and context , or null if a suitable renderer could not be found .
* @ deprecated Use { @ link WTemplate } instead . */
@ Deprecated public static Renderer getTemplateRenderer ( final RenderContext context ) { } } | String packageName = context . getRenderPackage ( ) ; Renderer renderer = INSTANCE . templateRenderers . get ( packageName ) ; if ( renderer == null ) { renderer = INSTANCE . findTemplateRenderer ( packageName ) ; } else if ( renderer == NULL_RENDERER ) { return null ; } return renderer ; |
public class SystemProperties { /** * Returns { @ link Boolean # TRUE } if the named system property exists and is equal to the string { @ code " true }
* ( ignoring case ) , returns { @ link Boolean # FALSE } if the system property exists and doesn ' t equal { @ code " true }
* otherwise returns { @ code null } if the named system property does not exist .
* @ param name the system property name .
* @ return { @ link Boolean # TRUE } , { @ link Boolean # FALSE } or { @ code null }
* @ since 2.16 */
@ CheckForNull public static Boolean optBoolean ( String name ) { } } | String v = getString ( name ) ; return v == null ? null : Boolean . parseBoolean ( v ) ; |
public class LineString { /** * Create a new instance of this class by defining a list of { @ link Point } s which follow the
* correct specifications described in the Point documentation . Note that there should not be any
* duplicate points inside the list and the points combined should create a LineString with a
* distance greater than 0.
* Note that if less than 2 points are passed in , a runtime exception will occur .
* @ param points a list of { @ link Point } s which make up the LineString geometry
* @ return a new instance of this class defined by the values passed inside this static factory
* method
* @ since 3.0.0 */
public static LineString fromLngLats ( @ NonNull List < Point > points ) { } } | return new LineString ( TYPE , null , points ) ; |
public class SecureUtil { /** * 生成公钥 , 仅用于非对称加密 < br >
* 算法见 : https : / / docs . oracle . com / javase / 7 / docs / technotes / guides / security / StandardNames . html # KeyFactory
* @ param algorithm 算法
* @ param keySpec { @ link KeySpec }
* @ return 公钥 { @ link PublicKey }
* @ since 3.1.1 */
public static PublicKey generatePublicKey ( String algorithm , KeySpec keySpec ) { } } | return KeyUtil . generatePublicKey ( algorithm , keySpec ) ; |
public class WorkManagerImpl { /** * Do first checks for work starting methods
* @ param work to check
* @ param startTimeout to check
* @ param execContext to check
* @ throws WorkException in case of check don ' t pass */
public void doFirstChecks ( Work work , long startTimeout , ExecutionContext execContext ) throws WorkException { } } | if ( isShutdown ( ) ) throw new WorkRejectedException ( bundle . workmanagerShutdown ( ) ) ; if ( work == null ) throw new WorkRejectedException ( bundle . workIsNull ( ) ) ; if ( startTimeout < 0 ) throw new WorkRejectedException ( bundle . startTimeoutIsNegative ( startTimeout ) ) ; checkAndVerifyWork ( work , execContext ) ; |
public class QrHelperFunctions_ZDRM { /** * Performs the following operation : < br >
* u [ ( startU + j ) : ( startU + numRows ) ] / = A < br >
* were u and A are a complex */
public static void divideElements ( final int j , final int numRows , final double [ ] u , final int startU , final double realA , final double imagA ) { } } | double mag2 = realA * realA + imagA * imagA ; int index = ( startU + j ) * 2 ; for ( int i = j ; i < numRows ; i ++ ) { double realU = u [ index ] ; double imagU = u [ index + 1 ] ; // u [ i + startU ] / = u _ 0;
u [ index ++ ] = ( realU * realA + imagU * imagA ) / mag2 ; u [ index ++ ] = ( imagU * realA - realU * imagA ) / mag2 ; } |
public class FileHeader { /** * Updates the gutter . */
public void update ( ) { } } | FileTextArea textArea = fileWindow . textArea ; Font font = textArea . getFont ( ) ; setFont ( font ) ; FontMetrics metrics = getFontMetrics ( font ) ; int h = metrics . getHeight ( ) ; int lineCount = textArea . getLineCount ( ) + 1 ; String dummy = Integer . toString ( lineCount ) ; if ( dummy . length ( ) < 2 ) { dummy = "99" ; } Dimension d = new Dimension ( ) ; d . width = metrics . stringWidth ( dummy ) + 16 ; d . height = lineCount * h + 100 ; setPreferredSize ( d ) ; setSize ( d ) ; |
public class OAuth { /** * Returns the client _ id to use in OAuth requests .
* @ param params the request parameters .
* @ param options the request options .
* @ return the client _ id . */
private static String getClientId ( Map < String , Object > params , RequestOptions options ) throws AuthenticationException { } } | String clientId = Stripe . clientId ; if ( ( options != null ) && ( options . getClientId ( ) != null ) ) { clientId = options . getClientId ( ) ; } if ( ( params != null ) && ( params . get ( "client_id" ) != null ) ) { clientId = ( String ) params . get ( "client_id" ) ; } if ( clientId == null ) { throw new AuthenticationException ( "No client_id provided. (HINT: set client_id key using 'Stripe.clientId = <CLIENT-ID>'. " + "You can find your client_ids in your Stripe dashboard at " + "https://dashboard.stripe.com/account/applications/settings, " + "after registering your account as a platform. See " + "https://stripe.com/docs/connect/standard-accounts for details, " + "or email support@stripe.com if you have any questions." , null , null , 0 ) ; } return clientId ; |
public class BeanUtils { /** * private methods - - - - - */
private static BeanInfo getBeanInfo ( Object bean ) { } } | try { return Introspector . getBeanInfo ( bean . getClass ( ) ) ; } catch ( IntrospectionException exception ) { // TODO : handle exception better ?
throw new RuntimeException ( "Error introspecting bean: " + bean , exception ) ; } |
public class CouchbaseAsyncBucket { /** * Helper method to stop tracing for the parent span given . */
private Action0 stopTracing ( final Span parent ) { } } | return new Action0 ( ) { @ Override public void call ( ) { if ( parent != null ) { environment . tracer ( ) . scopeManager ( ) . activate ( parent , true ) . close ( ) ; } } } ; |
public class ProductSearchClient { /** * Gets information associated with a ProductSet .
* < p > Possible errors :
* < p > & # 42 ; Returns NOT _ FOUND if the ProductSet does not exist .
* < p > Sample code :
* < pre > < code >
* try ( ProductSearchClient productSearchClient = ProductSearchClient . create ( ) ) {
* ProductSetName name = ProductSetName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ PRODUCT _ SET ] " ) ;
* ProductSet response = productSearchClient . getProductSet ( name . toString ( ) ) ;
* < / code > < / pre >
* @ param name Resource name of the ProductSet to get .
* < p > Format is : ` projects / PROJECT _ ID / locations / LOG _ ID / productSets / PRODUCT _ SET _ ID `
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final ProductSet getProductSet ( String name ) { } } | GetProductSetRequest request = GetProductSetRequest . newBuilder ( ) . setName ( name ) . build ( ) ; return getProductSet ( request ) ; |
public class DefaultNotificationManager { /** * { @ inheritDoc } */
@ Override public Multimap < String , NotificationChannel > findSubscribedRecipientsForDispatcher ( NotificationDispatcher dispatcher , String projectKey , SubscriberPermissionsOnProject subscriberPermissionsOnProject ) { } } | verifyProjectKey ( projectKey ) ; String dispatcherKey = dispatcher . getKey ( ) ; Set < SubscriberAndChannel > subscriberAndChannels = Arrays . stream ( notificationChannels ) . flatMap ( notificationChannel -> toSubscriberAndChannels ( dispatcherKey , projectKey , notificationChannel ) ) . collect ( Collectors . toSet ( ) ) ; if ( subscriberAndChannels . isEmpty ( ) ) { return ImmutableMultimap . of ( ) ; } ImmutableSetMultimap . Builder < String , NotificationChannel > builder = ImmutableSetMultimap . builder ( ) ; try ( DbSession dbSession = dbClient . openSession ( false ) ) { Set < String > authorizedLogins = keepAuthorizedLogins ( dbSession , projectKey , subscriberAndChannels , subscriberPermissionsOnProject ) ; subscriberAndChannels . stream ( ) . filter ( subscriberAndChannel -> authorizedLogins . contains ( subscriberAndChannel . getSubscriber ( ) . getLogin ( ) ) ) . forEach ( subscriberAndChannel -> builder . put ( subscriberAndChannel . getSubscriber ( ) . getLogin ( ) , subscriberAndChannel . getChannel ( ) ) ) ; } return builder . build ( ) ; |
public class SQSObjectMessage { /** * Serialize the < code > Serializable < / code > object to < code > String < / code > . */
protected static String serialize ( Serializable serializable ) throws JMSException { } } | if ( serializable == null ) { return null ; } String serializedString ; ObjectOutputStream objectOutputStream = null ; try { ByteArrayOutputStream bytesOut = new ByteArrayOutputStream ( ) ; objectOutputStream = new ObjectOutputStream ( bytesOut ) ; objectOutputStream . writeObject ( serializable ) ; objectOutputStream . flush ( ) ; serializedString = Base64 . encodeAsString ( bytesOut . toByteArray ( ) ) ; } catch ( IOException e ) { LOG . error ( "IOException: cannot serialize objectMessage" , e ) ; throw convertExceptionToMessageFormatException ( e ) ; } finally { if ( objectOutputStream != null ) { try { objectOutputStream . close ( ) ; } catch ( IOException e ) { LOG . warn ( e . getMessage ( ) ) ; } } } return serializedString ; |
public class Allure1Utils { /** * Generate title using name pattern . First step all " { method } " substrings will be replaced
* with given method name . Then replace all " { i } " substrings with i - th parameter . */
public static String getTitle ( final String namePattern , final String methodName , final Object instance , final Object ... parameters ) { } } | final String finalPattern = namePattern . replaceAll ( "\\{method}" , methodName ) . replaceAll ( "\\{this}" , String . valueOf ( instance ) ) ; final int paramsCount = parameters == null ? 0 : parameters . length ; final Object [ ] results = new Object [ paramsCount ] ; for ( int i = 0 ; i < paramsCount ; i ++ ) { results [ i ] = arrayToString ( parameters [ i ] ) ; } return MessageFormat . format ( finalPattern , results ) ; |
public class StringIterate { /** * Transform the int code point elements to a new string using the specified function { @ code function } .
* @ deprecated since 7.0 . Use { @ link # collectCodePoint ( String , CodePointFunction ) } instead . */
@ Deprecated public static String collect ( String string , CodePointFunction function ) { } } | return StringIterate . collectCodePoint ( string , function ) ; |
public class MinMax { /** * Combine two { @ code MinMax } objects .
* @ param other the other { @ code MinMax } object to combine
* @ return { @ code this }
* @ throws java . lang . NullPointerException if the { @ code other } object is
* { @ code null } . */
public MinMax < C > combine ( final MinMax < C > other ) { } } | _min = min ( _comparator , _min , other . _min ) ; _max = max ( _comparator , _max , other . _max ) ; _count += other . _count ; return this ; |
public class MessageBuilder { /** * Retrieved the message mapped with the given key .
* Perform the search by iterating over all resource bundles available in reverse order .
* @ param messageKey the key of the message to translate
* @ return the translated message or null */
private String findMessage ( final String messageKey ) { } } | String message = null ; try { if ( ! this . resourceBundles . isEmpty ( ) ) { for ( int i = this . resourceBundles . size ( ) - 1 ; i >= 0 && message == null ; i -- ) { if ( this . resourceBundles . get ( i ) . containsKey ( messageKey ) ) { message = this . resourceBundles . get ( i ) . getString ( messageKey ) ; } } } } catch ( final MissingResourceException e ) { LOGGER . error ( "Message key not found into resource bundle" , e ) ; } return message ; |
public class UniverseApi { /** * Get system jumps Get the number of jumps in solar systems within the last
* hour ending at the timestamp of the Last - Modified header , excluding
* wormhole space . Only systems with jumps will be listed - - - This route is
* cached for up to 3600 seconds
* @ param datasource
* The server name you would like data from ( optional , default to
* tranquility )
* @ param ifNoneMatch
* ETag from a previous request . A 304 will be returned if this
* matches the current ETag ( optional )
* @ return List & lt ; SystemJumpsResponse & gt ;
* @ throws ApiException
* If fail to call the API , e . g . server error or cannot
* deserialize the response body */
public List < SystemJumpsResponse > getUniverseSystemJumps ( String datasource , String ifNoneMatch ) throws ApiException { } } | ApiResponse < List < SystemJumpsResponse > > resp = getUniverseSystemJumpsWithHttpInfo ( datasource , ifNoneMatch ) ; return resp . getData ( ) ; |
public class DatabaseDAODefaultImpl { public String [ ] get_attribute_alias_list ( Database database , String wildcard ) throws DevFailed { } } | DeviceData argIn = new DeviceData ( ) ; argIn . insert ( wildcard ) ; DeviceData argOut = command_inout ( database , "DbGetAttributeAliasList" , argIn ) ; return argOut . extractStringArray ( ) ; |
public class BasicNlsMessage { /** * This method gets the { @ link NlsTemplate } of this message .
* @ param resolver is the { @ link NlsTemplateResolver } used to { @ link NlsTemplateResolver # resolveTemplate ( String )
* resolve } the { @ link NlsTemplate } if NOT yet available .
* @ return the text the { @ link NlsTemplate } . */
public NlsTemplate getTemplate ( NlsTemplateResolver resolver ) { } } | if ( this . template == null ) { synchronized ( this ) { if ( this . template == null ) { NlsTemplateResolver templateResolver ; if ( resolver == null ) { templateResolver = NlsAccess . getTemplateResolver ( ) ; } else { templateResolver = resolver ; } this . template = templateResolver . resolveTemplate ( this . message ) ; } } } return this . template ; |
public class LicenseKeyPair { /** * Get the byte representation of the private key as it is returned by the underlying security library . This is
* NOT the byte array that contains the algorithm at the start . This is the key in raw format .
* @ return the key as bytes */
public byte [ ] getPrivate ( ) { } } | keyNotNull ( pair . getPrivate ( ) ) ; Key key = pair . getPrivate ( ) ; return getKeyBytes ( key ) ; |
public class DssatXFileOutput { /** * DSSAT Experiment Data Output method
* @ param arg0 file output path
* @ param result data holder object */
@ Override public void writeFile ( String arg0 , Map result ) { } } | // Initial variables
HashMap expData = ( HashMap ) result ; ArrayList < HashMap > soilArr = readSWData ( expData , "soil" ) ; ArrayList < HashMap > wthArr = readSWData ( expData , "weather" ) ; HashMap soilData ; HashMap wthData ; BufferedWriter bwX ; // output object
StringBuilder sbGenData = new StringBuilder ( ) ; // construct the data info in the output
StringBuilder sbDomeData = new StringBuilder ( ) ; // construct the dome info in the output
StringBuilder sbNotesData = new StringBuilder ( ) ; // construct the data info in the output
StringBuilder sbData = new StringBuilder ( ) ; // construct the data info in the output
StringBuilder eventPart2 = new StringBuilder ( ) ; // output string for second part of event data
HashMap sqData ; ArrayList < HashMap > evtArr ; // Arraylist for section data holder
ArrayList < HashMap > adjArr ; HashMap evtData ; // int trmnNum ; / / total numbers of treatment in the data holder
int cuNum ; // total numbers of cultivars in the data holder
int flNum ; // total numbers of fields in the data holder
int saNum ; // total numbers of soil analysis in the data holder
int icNum ; // total numbers of initial conditions in the data holder
int mpNum ; // total numbers of plaintings in the data holder
int miNum ; // total numbers of irrigations in the data holder
int mfNum ; // total numbers of fertilizers in the data holder
int mrNum ; // total numbers of residues in the data holder
int mcNum ; // total numbers of chemical in the data holder
int mtNum ; // total numbers of tillage in the data holder
int meNum ; // total numbers of enveronment modification in the data holder
int mhNum ; // total numbers of harvest in the data holder
int smNum ; // total numbers of simulation controll record
ArrayList < HashMap > sqArr ; // array for treatment record
ArrayList < HashMap > cuArr = new ArrayList ( ) ; // array for cultivars record
ArrayList < HashMap > flArr = new ArrayList ( ) ; // array for fields record
ArrayList < HashMap > saArr = new ArrayList ( ) ; // array for soil analysis record
ArrayList < HashMap > icArr = new ArrayList ( ) ; // array for initial conditions record
ArrayList < HashMap > mpArr = new ArrayList ( ) ; // array for plaintings record
ArrayList < ArrayList < HashMap > > miArr = new ArrayList ( ) ; // array for irrigations record
ArrayList < ArrayList < HashMap > > mfArr = new ArrayList ( ) ; // array for fertilizers record
ArrayList < ArrayList < HashMap > > mrArr = new ArrayList ( ) ; // array for residues record
ArrayList < ArrayList < HashMap > > mcArr = new ArrayList ( ) ; // array for chemical record
ArrayList < ArrayList < HashMap > > mtArr = new ArrayList ( ) ; // array for tillage record
ArrayList < ArrayList < HashMap > > meArr = new ArrayList ( ) ; // array for enveronment modification record
ArrayList < ArrayList < HashMap > > mhArr = new ArrayList ( ) ; // array for harvest record
ArrayList < HashMap > smArr = new ArrayList ( ) ; // array for simulation control record
// String exName ;
boolean isFallow = false ; try { // Set default value for missing data
if ( expData == null || expData . isEmpty ( ) ) { return ; } // decompressData ( ( HashMap ) result ) ;
setDefVal ( ) ; // Initial BufferedWriter
String fileName = getFileName ( result , "X" ) ; arg0 = revisePath ( arg0 ) ; outputFile = new File ( arg0 + fileName ) ; bwX = new BufferedWriter ( new FileWriter ( outputFile ) ) ; // Output XFile
// EXP . DETAILS Section
sbError . append ( String . format ( "*EXP.DETAILS: %1$-10s %2$s\r\n\r\n" , getFileName ( result , "" ) . replaceAll ( "\\." , "" ) , getObjectOr ( expData , "local_name" , defValBlank ) ) ) ; // GENERAL Section
sbGenData . append ( "*GENERAL\r\n" ) ; // People
if ( ! getObjectOr ( expData , "person_notes" , "" ) . equals ( "" ) ) { sbGenData . append ( String . format ( "@PEOPLE\r\n %1$s\r\n" , getObjectOr ( expData , "person_notes" , defValBlank ) ) ) ; } // Address
if ( getObjectOr ( expData , "institution" , "" ) . equals ( "" ) ) { // if ( ! getObjectOr ( expData , " fl _ loc _ 1 " , " " ) . equals ( " " )
// & & getObjectOr ( expData , " fl _ loc _ 2 " , " " ) . equals ( " " )
// & & getObjectOr ( expData , " fl _ loc _ 3 " , " " ) . equals ( " " ) ) {
// sbGenData . append ( String . format ( " @ ADDRESS \ r \ n % 3 $ s , % 2 $ s , % 1 $ s \ r \ n " ,
// getObjectOr ( expData , " fl _ loc _ 1 " , defValBlank ) . toString ( ) ,
// getObjectOr ( expData , " fl _ loc _ 2 " , defValBlank ) . toString ( ) ,
// getObjectOr ( expData , " fl _ loc _ 3 " , defValBlank ) . toString ( ) ) ) ;
} else { sbGenData . append ( String . format ( "@ADDRESS\r\n %1$s\r\n" , getObjectOr ( expData , "institution" , defValBlank ) ) ) ; } // Site
if ( ! getObjectOr ( expData , "site_name" , "" ) . equals ( "" ) ) { sbGenData . append ( String . format ( "@SITE\r\n %1$s\r\n" , getObjectOr ( expData , "site_name" , defValBlank ) ) ) ; } // Plot Info
if ( isPlotInfoExist ( expData ) ) { sbGenData . append ( "@ PAREA PRNO PLEN PLDR PLSP PLAY HAREA HRNO HLEN HARM.........\r\n" ) ; sbGenData . append ( String . format ( " %1$6s %2$5s %3$5s %4$5s %5$5s %6$-5s %7$5s %8$5s %9$5s %10$-15s\r\n" , formatNumStr ( 6 , expData , "plta" , defValR ) , formatNumStr ( 5 , expData , "pltr#" , defValI ) , formatNumStr ( 5 , expData , "pltln" , defValR ) , formatNumStr ( 5 , expData , "pldr" , defValI ) , formatNumStr ( 5 , expData , "pltsp" , defValI ) , getObjectOr ( expData , "pllay" , defValC ) , formatNumStr ( 5 , expData , "pltha" , defValR ) , formatNumStr ( 5 , expData , "plth#" , defValI ) , formatNumStr ( 5 , expData , "plthl" , defValR ) , getObjectOr ( expData , "plthm" , defValC ) ) ) ; } // Notes
if ( ! getObjectOr ( expData , "tr_notes" , "" ) . equals ( "" ) ) { sbNotesData . append ( "@NOTES\r\n" ) ; String notes = getObjectOr ( expData , "tr_notes" , defValC ) ; notes = notes . replaceAll ( "\\\\r\\\\n" , "\r\n" ) ; // If notes contain newline code , then write directly
if ( notes . contains ( "\r\n" ) ) { // sbData . append ( String . format ( " % 1 $ s \ r \ n " , notes ) ) ;
sbNotesData . append ( notes ) ; } // Otherwise , add newline for every 75 - bits charactors
else { while ( notes . length ( ) > 75 ) { sbNotesData . append ( " " ) . append ( notes . substring ( 0 , 75 ) ) . append ( "\r\n" ) ; notes = notes . substring ( 75 ) ; } sbNotesData . append ( " " ) . append ( notes ) . append ( "\r\n" ) ; } } sbData . append ( "\r\n" ) ; // TREATMENT Section
sqArr = getDataList ( expData , "dssat_sequence" , "data" ) ; evtArr = getDataList ( expData , "management" , "events" ) ; adjArr = getObjectOr ( expData , "adjustments" , new ArrayList ( ) ) ; ArrayList < HashMap > rootArr = getObjectOr ( expData , "dssat_root" , new ArrayList ( ) ) ; ArrayList < HashMap > meOrgArr = getDataList ( expData , "dssat_environment_modification" , "data" ) ; ArrayList < HashMap > smOrgArr = getDataList ( expData , "dssat_simulation_control" , "data" ) ; String seqId ; String em ; String sm ; boolean isAnyDomeApplied = false ; LinkedHashMap < String , String > appliedDomes = new LinkedHashMap < String , String > ( ) ; sbData . append ( "*TREATMENTS -------------FACTOR LEVELS------------\r\n" ) ; sbData . append ( "@N R O C TNAME.................... CU FL SA IC MP MI MF MR MC MT ME MH SM\r\n" ) ; // if there is no sequence info , create dummy data
if ( sqArr . isEmpty ( ) ) { sqArr . add ( new HashMap ( ) ) ; } // Set sequence related block info
for ( int i = 0 ; i < sqArr . size ( ) ; i ++ ) { sqData = sqArr . get ( i ) ; seqId = getValueOr ( sqData , "seqid" , defValBlank ) ; em = getValueOr ( sqData , "em" , defValBlank ) ; sm = getValueOr ( sqData , "sm" , defValBlank ) ; if ( i < soilArr . size ( ) ) { soilData = soilArr . get ( i ) ; } else if ( soilArr . isEmpty ( ) ) { soilData = new HashMap ( ) ; } else { soilData = soilArr . get ( 0 ) ; } if ( soilData == null ) { soilData = new HashMap ( ) ; } if ( i < wthArr . size ( ) ) { wthData = wthArr . get ( i ) ; } else if ( wthArr . isEmpty ( ) ) { wthData = new HashMap ( ) ; } else { wthData = wthArr . get ( 0 ) ; } if ( wthData == null ) { wthData = new HashMap ( ) ; } HashMap cuData = new HashMap ( ) ; HashMap flData = new HashMap ( ) ; HashMap mpData = new HashMap ( ) ; ArrayList < HashMap > miSubArr = new ArrayList < HashMap > ( ) ; ArrayList < HashMap > mfSubArr = new ArrayList < HashMap > ( ) ; ArrayList < HashMap > mrSubArr = new ArrayList < HashMap > ( ) ; ArrayList < HashMap > mcSubArr = new ArrayList < HashMap > ( ) ; ArrayList < HashMap > mtSubArr = new ArrayList < HashMap > ( ) ; ArrayList < HashMap > meSubArr = new ArrayList < HashMap > ( ) ; ArrayList < HashMap > mhSubArr = new ArrayList < HashMap > ( ) ; HashMap smData = new HashMap ( ) ; HashMap rootData ; // Set exp root info
if ( i < rootArr . size ( ) ) { rootData = rootArr . get ( i ) ; } else { rootData = expData ; } // Applied DOME Info
String trt_name = getValueOr ( sqData , "trt_name" , getValueOr ( rootData , "trt_name" , getValueOr ( rootData , "exname" , defValC ) ) ) ; if ( getValueOr ( rootData , "dome_applied" , "" ) . equals ( "Y" ) ) { // If it comes with seasonal exname style
if ( trt_name . matches ( ".+[^_]__\\d+$" ) ) { trt_name = trt_name . replaceAll ( "__\\d+$" , "_*" ) ; if ( appliedDomes . get ( trt_name + " Field " ) == null ) { appliedDomes . put ( trt_name + " Field " , getAppliedDomes ( rootData , "field" ) ) ; } if ( appliedDomes . get ( trt_name + " Seasonal " ) == null ) { appliedDomes . put ( trt_name + " Seasonal " , getAppliedDomes ( rootData , "seasonal" ) ) ; } } else { appliedDomes . put ( trt_name + " Field " , getAppliedDomes ( rootData , "field" ) ) ; appliedDomes . put ( trt_name + " Seasonal " , getAppliedDomes ( rootData , "seasonal" ) ) ; } isAnyDomeApplied = true ; copyItem ( smData , rootData , "seasonal_dome_applied" ) ; } else { appliedDomes . put ( trt_name , "" ) ; } // Set field info
copyItem ( flData , rootData , "id_field" ) ; String dssat_wst_id = getValueOr ( rootData , "dssat_wst_id" , "" ) ; // Weather data is missing plus dssat _ wst _ id is available
if ( ! dssat_wst_id . equals ( "" ) && wthData . isEmpty ( ) ) { flData . put ( "wst_id" , dssat_wst_id ) ; } else { flData . put ( "wst_id" , getWthFileName ( rootData ) ) ; } copyItem ( flData , rootData , "flsl" ) ; copyItem ( flData , rootData , "flob" ) ; copyItem ( flData , rootData , "fl_drntype" ) ; copyItem ( flData , rootData , "fldrd" ) ; copyItem ( flData , rootData , "fldrs" ) ; copyItem ( flData , rootData , "flst" ) ; if ( soilData . get ( "sltx" ) != null ) { copyItem ( flData , soilData , "sltx" ) ; } else { copyItem ( flData , rootData , "sltx" ) ; } copyItem ( flData , soilData , "sldp" ) ; copyItem ( flData , rootData , "soil_id" ) ; copyItem ( flData , rootData , "fl_name" ) ; copyItem ( flData , rootData , "fl_lat" ) ; copyItem ( flData , rootData , "fl_long" ) ; copyItem ( flData , rootData , "flele" ) ; copyItem ( flData , rootData , "farea" ) ; copyItem ( flData , rootData , "fllwr" ) ; copyItem ( flData , rootData , "flsla" ) ; copyItem ( flData , getObjectOr ( rootData , "dssat_info" , new HashMap ( ) ) , "flhst" ) ; copyItem ( flData , getObjectOr ( rootData , "dssat_info" , new HashMap ( ) ) , "fhdur" ) ; // remove the " _ trno " in the soil _ id when soil analysis is available
String soilId = getValueOr ( flData , "soil_id" , "" ) ; if ( soilId . length ( ) > 10 && soilId . matches ( "\\w+_\\d+" ) || soilId . length ( ) < 8 ) { flData . put ( "soil_id" , getSoilID ( flData ) ) ; } flNum = setSecDataArr ( flData , flArr ) ; // Set initial condition info
icNum = setSecDataArr ( getObjectOr ( rootData , "initial_conditions" , new HashMap ( ) ) , icArr ) ; // Set environment modification info
for ( HashMap meOrgArr1 : meOrgArr ) { if ( em . equals ( meOrgArr1 . get ( "em" ) ) ) { HashMap tmp = new HashMap ( ) ; tmp . putAll ( meOrgArr1 ) ; tmp . remove ( "em" ) ; meSubArr . add ( tmp ) ; } } if ( ! adjArr . isEmpty ( ) ) { ArrayList < HashMap < String , String > > startArr = new ArrayList ( ) ; ArrayList < HashMap < String , String > > endArr = new ArrayList ( ) ; String sdat = getValueOr ( rootData , "sdat" , "" ) ; if ( sdat . equals ( "" ) ) { sdat = getPdate ( result ) ; } final List < String > vars = Arrays . asList ( new String [ ] { "tmax" , "tmin" , "srad" , "wind" , "rain" , "co2y" , "tdew" } ) ; final List < String > emVars = Arrays . asList ( new String [ ] { "emmax" , "emmin" , "emrad" , "emwnd" , "emrai" , "emco2" , "emdew" } ) ; final List < String > emcVars = Arrays . asList ( new String [ ] { "ecmax" , "ecmin" , "ecrad" , "ecwnd" , "ecrai" , "ecco2" , "ecdew" } ) ; for ( HashMap adjData : adjArr ) { if ( getValueOr ( adjData , "seqid" , defValBlank ) . equals ( seqId ) ) { String var = getValueOr ( adjData , "variable" , "" ) ; String ecVar ; String val = getValueOr ( adjData , "value" , "" ) ; String method = getValueOr ( adjData , "method" , "" ) ; String startDate = getValueOr ( adjData , "startdate" , sdat ) ; String endDate = getValueOr ( adjData , "enddate" , "" ) ; int idx = vars . indexOf ( var ) ; if ( idx < 0 ) { LOG . warn ( "Found unsupported adjusment variable [" + var + "], will be ignored." ) ; sbError . append ( "Found unsupported adjusment variable [" ) . append ( var ) . append ( "], will be ignored." ) ; continue ; } else { var = emVars . get ( idx ) ; ecVar = emcVars . get ( idx ) ; } if ( method . equals ( "substitute" ) ) { method = "R" ; } else if ( method . equals ( "delta" ) ) { if ( val . startsWith ( "-" ) ) { val = val . substring ( 1 ) ; method = "S" ; } else { method = "A" ; } } else if ( method . equals ( "multiply" ) ) { method = "M" ; } else { LOG . 
warn ( "Found unsupported adjusment method [" + method + "] for [" + var + "], will be ignored." ) ; sbError . append ( "Found unsupported adjusment method [" ) . append ( method ) . append ( "] for [" ) . append ( var ) . append ( "], will be ignored." ) ; continue ; } HashMap tmp = DssatCommonInput . getSectionDataWithNocopy ( startArr , "date" , startDate ) ; if ( tmp == null ) { tmp = new HashMap ( ) ; startArr . add ( tmp ) ; } tmp . put ( var , val ) ; tmp . put ( ecVar , method ) ; tmp . put ( "date" , startDate ) ; if ( ! endDate . equals ( "" ) ) { tmp = DssatCommonInput . getSectionDataWithNocopy ( endArr , "date" , endDate ) ; if ( tmp == null ) { tmp = new HashMap ( ) ; endArr . add ( tmp ) ; } tmp . put ( var , "0" ) ; tmp . put ( ecVar , "A" ) ; tmp . put ( "date" , endDate ) ; } } } meSubArr . addAll ( startArr ) ; meSubArr . addAll ( endArr ) ; } // Set soil analysis info
// ArrayList < HashMap > icSubArr = getDataList ( expData , " initial _ condition " , " soilLayer " ) ;
ArrayList < HashMap > soilLarys = getObjectOr ( soilData , "soilLayer" , new ArrayList ( ) ) ; // / / If it is stored in the initial condition block
// if ( isSoilAnalysisExist ( icSubArr ) ) {
// HashMap saData = new HashMap ( ) ;
// ArrayList < HashMap > saSubArr = new ArrayList < HashMap > ( ) ;
// HashMap saSubData ;
// for ( int i = 0 ; i < icSubArr . size ( ) ; i + + ) {
// saSubData = new HashMap ( ) ;
// copyItem ( saSubData , icSubArr . get ( i ) , " sabl " , " icbl " , false ) ;
// copyItem ( saSubData , icSubArr . get ( i ) , " sasc " , " slsc " , false ) ;
// saSubArr . add ( saSubData ) ;
// copyItem ( saData , soilData , " sadat " ) ;
// saData . put ( " soilLayer " , saSubArr ) ;
// saNum = setSecDataArr ( saData , saArr ) ;
// } else
// If it is stored in the soil block
if ( isSoilAnalysisExist ( soilLarys ) ) { HashMap saData = new HashMap ( ) ; ArrayList < HashMap > saSubArr = new ArrayList < HashMap > ( ) ; HashMap saSubData ; for ( HashMap soilLary : soilLarys ) { saSubData = new HashMap ( ) ; copyItem ( saSubData , soilLary , "sabl" , "sllb" , false ) ; copyItem ( saSubData , soilLary , "saoc" , "sloc" , false ) ; copyItem ( saSubData , soilLary , "sasc" , "slsc" , false ) ; saSubArr . add ( saSubData ) ; } copyItem ( saData , soilData , "sadat" ) ; saData . put ( "soilLayer" , saSubArr ) ; saNum = setSecDataArr ( saData , saArr ) ; } else { saNum = 0 ; } // Set simulation control info
for ( HashMap smOrgArr1 : smOrgArr ) { if ( sm . equals ( smOrgArr1 . get ( "sm" ) ) ) { smData . putAll ( smOrgArr1 ) ; smData . remove ( "sm" ) ; break ; } } // if ( smData . isEmpty ( ) ) {
// smData . put ( " fertilizer " , mfSubArr ) ;
// smData . put ( " irrigation " , miSubArr ) ;
// smData . put ( " planting " , mpData ) ;
copyItem ( smData , rootData , "sdat" ) ; copyItem ( smData , rootData , "dssat_model" ) ; copyItem ( smData , getObjectOr ( wthData , "weather" , new HashMap ( ) ) , "co2y" ) ; // Loop all event data
for ( HashMap evtArr1 : evtArr ) { evtData = new HashMap ( ) ; evtData . putAll ( evtArr1 ) ; // Check if it has same sequence number
if ( getValueOr ( evtData , "seqid" , defValBlank ) . equals ( seqId ) ) { evtData . remove ( "seqid" ) ; // Planting event
if ( getValueOr ( evtData , "event" , defValBlank ) . equals ( "planting" ) ) { // Set cultivals info
copyItem ( cuData , evtData , "cul_name" ) ; copyItem ( cuData , evtData , "crid" ) ; copyItem ( cuData , evtData , "cul_id" ) ; copyItem ( cuData , evtData , "dssat_cul_id" ) ; copyItem ( cuData , evtData , "rm" ) ; copyItem ( cuData , evtData , "cul_notes" ) ; translateTo2BitCrid ( cuData ) ; // Set planting info
// To make comparison only on the planting information ( without crop data ) , use HashMap to rebuild pure planting map
copyItem ( mpData , evtData , "date" ) ; copyItem ( mpData , evtData , "edate" ) ; copyItem ( mpData , evtData , "plpop" ) ; copyItem ( mpData , evtData , "plpoe" ) ; copyItem ( mpData , evtData , "plma" ) ; copyItem ( mpData , evtData , "plds" ) ; copyItem ( mpData , evtData , "plrs" ) ; copyItem ( mpData , evtData , "plrd" ) ; copyItem ( mpData , evtData , "pldp" ) ; copyItem ( mpData , evtData , "plmwt" ) ; copyItem ( mpData , evtData , "page" ) ; copyItem ( mpData , evtData , "plenv" ) ; copyItem ( mpData , evtData , "plph" ) ; copyItem ( mpData , evtData , "plspl" ) ; copyItem ( mpData , evtData , "pl_name" ) ; } // irrigation event
else if ( getValueOr ( evtData , "event" , "" ) . equals ( "irrigation" ) ) { miSubArr . add ( evtData ) ; } // fertilizer event
else if ( getValueOr ( evtData , "event" , "" ) . equals ( "fertilizer" ) ) { mfSubArr . add ( evtData ) ; } // organic _ matter event
else if ( getValueOr ( evtData , "event" , "" ) . equals ( "organic_matter" ) ) { // P . S . change event name to organic - materials ; Back to organic _ matter again .
mrSubArr . add ( evtData ) ; } // chemical event
else if ( getValueOr ( evtData , "event" , "" ) . equals ( "chemical" ) ) { mcSubArr . add ( evtData ) ; } // tillage event
else if ( getValueOr ( evtData , "event" , "" ) . equals ( "tillage" ) ) { mtSubArr . add ( evtData ) ; // } / / environment _ modification event
// else if ( getValueOr ( evtData , " event " , " " ) . equals ( " environment _ modification " ) ) {
// meSubArr . add ( evtData ) ;
} // harvest event
else if ( getValueOr ( evtData , "event" , "" ) . equals ( "harvest" ) ) { mhSubArr . add ( evtData ) ; if ( ! getValueOr ( evtData , "date" , "" ) . trim ( ) . equals ( "" ) ) { smData . put ( "hadat_valid" , "Y" ) ; } // copyItem ( smData , evtData , " hadat " , " date " , false ) ;
} else { } } else { } } // Cancel for assume default value handling
// // If alternative fields are available for fertilizer data
// if ( mfSubArr . isEmpty ( ) ) {
// if ( ! getObjectOr ( result , " fen _ tot " , " " ) . equals ( " " )
// | | ! getObjectOr ( result , " fep _ tot " , " " ) . equals ( " " )
// | | ! getObjectOr ( result , " fek _ tot " , " " ) . equals ( " " ) ) {
// mfSubArr . add ( new HashMap ( ) ) ;
// Special handling for fallow experiment
if ( mpData . isEmpty ( ) ) { isFallow = true ; cuData . put ( "crid" , "FA" ) ; cuData . put ( "dssat_cul_id" , "IB0001" ) ; cuData . put ( "cul_name" , "Fallow" ) ; if ( mhSubArr . isEmpty ( ) ) { HashMap mhData = new HashMap ( ) ; copyItem ( mhData , rootData , "date" , "endat" , false ) ; // mhData . put ( " hastg " , " GS000 " ) ;
mhSubArr . add ( mhData ) ; smData . put ( "hadat_valid" , "Y" ) ; } } cuNum = setSecDataArr ( cuData , cuArr ) ; mpNum = setSecDataArr ( mpData , mpArr , true ) ; miNum = setSecDataArr ( miSubArr , miArr , true ) ; mfNum = setSecDataArr ( mfSubArr , mfArr , true ) ; mrNum = setSecDataArr ( mrSubArr , mrArr , true ) ; mcNum = setSecDataArr ( mcSubArr , mcArr , true ) ; mtNum = setSecDataArr ( mtSubArr , mtArr , true ) ; meNum = setSecDataArr ( meSubArr , meArr ) ; // Since old format of EM might exist , skip the check for EM
mhNum = setSecDataArr ( mhSubArr , mhArr , true ) ; smNum = setSecDataArr ( smData , smArr ) ; if ( smNum == 0 ) { smNum = 1 ; } StringBuilder sbBadEventRrrMsg = new StringBuilder ( ) ; boolean badEventFlg = false ; if ( mpNum < 0 ) { mpNum = 0 ; badEventFlg = true ; sbBadEventRrrMsg . append ( "MP " ) ; } if ( miNum < 0 ) { miNum = 0 ; badEventFlg = true ; sbBadEventRrrMsg . append ( "MI " ) ; } if ( mfNum < 0 ) { mfNum = 0 ; badEventFlg = true ; sbBadEventRrrMsg . append ( "MF " ) ; } if ( mrNum < 0 ) { mrNum = 0 ; badEventFlg = true ; sbBadEventRrrMsg . append ( "MR " ) ; } if ( mcNum < 0 ) { mcNum = 0 ; badEventFlg = true ; sbBadEventRrrMsg . append ( "MC " ) ; } if ( mtNum < 0 ) { mtNum = 0 ; badEventFlg = true ; sbBadEventRrrMsg . append ( "MT " ) ; } if ( meNum < 0 ) { meNum = 0 ; badEventFlg = true ; sbBadEventRrrMsg . append ( "ME " ) ; } if ( mhNum < 0 ) { mhNum = 0 ; badEventFlg = true ; sbBadEventRrrMsg . append ( "MH " ) ; } if ( badEventFlg ) { String tmp = sbBadEventRrrMsg . toString ( ) ; sbBadEventRrrMsg = new StringBuilder ( ) ; sbBadEventRrrMsg . append ( " ! Bad Event detected for " ) . append ( tmp ) ; } sbData . append ( String . format ( "%1$-3s%2$1s %3$1s %4$1s %5$-25s %6$2s %7$2s %8$2s %9$2s %10$2s %11$2s %12$2s %13$2s %14$2s %15$2s %16$2s %17$2s %18$2s%19$s\r\n" , String . format ( "%2s" , getValueOr ( sqData , "trno" , "1" ) ) , // For 3 - bit treatment number
getValueOr ( sqData , "sq" , "1" ) , // P . S . default value here is based on document DSSAT vol2 . pdf
getValueOr ( sqData , "op" , "1" ) , getValueOr ( sqData , "co" , "0" ) , formatStr ( 25 , sqData , "trt_name" , getValueOr ( rootData , "trt_name" , getValueOr ( rootData , "exname" , defValC ) ) ) , cuNum , // getObjectOr ( data , " ge " , defValI ) . toString ( ) ,
flNum , // getObjectOr ( data , " fl " , defValI ) . toString ( ) ,
saNum , // getObjectOr ( data , " sa " , defValI ) . toString ( ) ,
icNum , // getObjectOr ( data , " ic " , defValI ) . toString ( ) ,
mpNum , // getObjectOr ( data , " pl " , defValI ) . toString ( ) ,
miNum , // getObjectOr ( data , " ir " , defValI ) . toString ( ) ,
mfNum , // getObjectOr ( data , " fe " , defValI ) . toString ( ) ,
mrNum , // getObjectOr ( data , " om " , defValI ) . toString ( ) ,
mcNum , // getObjectOr ( data , " ch " , defValI ) . toString ( ) ,
mtNum , // getObjectOr ( data , " ti " , defValI ) . toString ( ) ,
meNum , // getObjectOr ( data , " em " , defValI ) . toString ( ) ,
mhNum , // getObjectOr ( data , " ha " , defValI ) . toString ( ) ,
smNum , sbBadEventRrrMsg . toString ( ) ) ) ; } sbData . append ( "\r\n" ) ; // CULTIVARS Section
if ( ! cuArr . isEmpty ( ) ) { sbData . append ( "*CULTIVARS\r\n" ) ; sbData . append ( "@C CR INGENO CNAME\r\n" ) ; for ( int idx = 0 ; idx < cuArr . size ( ) ; idx ++ ) { HashMap secData = cuArr . get ( idx ) ; // String cul _ id = defValC ;
String crid = getValueOr ( secData , "crid" , "" ) ; // Checl if necessary data is missing
if ( crid . equals ( "" ) ) { sbError . append ( "! Warning: Incompleted record because missing data : [crid]\r\n" ) ; // } else {
// / / Set cultivar id a default value deponds on the crop id
// if ( crid . equals ( " MZ " ) ) {
// cul _ id = " 990002 " ;
// } else {
// cul _ id = " 99999 " ;
// if ( getObjectOr ( secData , " cul _ id " , " " ) . equals ( " " ) ) {
// sbError . append ( " ! Warning : Incompleted record because missing data : [ cul _ id ] , and will use default value ' " ) . append ( cul _ id ) . append ( " ' \ r \ n " ) ;
} sbData . append ( String . format ( "%1$2s %2$-2s %3$-6s %4$s\r\n" , idx + 1 , formatStr ( 2 , secData , "crid" , defValBlank ) , // P . S . if missing , default value use blank string
formatStr ( 6 , secData , "dssat_cul_id" , getValueOr ( secData , "cul_id" , defValC ) ) , // P . S . Set default value which is deponds on crid ( Cancelled )
getValueOr ( secData , "cul_name" , defValC ) ) ) ; if ( ! getValueOr ( secData , "rm" , "" ) . equals ( "" ) || ! getValueOr ( secData , "cul_notes" , "" ) . equals ( "" ) ) { if ( sbNotesData . toString ( ) . equals ( "" ) ) { sbNotesData . append ( "@NOTES\r\n" ) ; } sbNotesData . append ( " Cultivar Additional Info\r\n" ) ; sbNotesData . append ( " C RM CNAME CUL_NOTES\r\n" ) ; sbNotesData . append ( String . format ( "%1$2s %2$4s %3$s\r\n" , idx + 1 , getValueOr ( secData , "rm" , defValC ) , getValueOr ( secData , "cul_notes" , defValC ) ) ) ; } } sbData . append ( "\r\n" ) ; } else if ( ! isFallow ) { sbError . append ( "! Warning: There is no cultivar data in the experiment.\r\n" ) ; } // FIELDS Section
if ( ! flArr . isEmpty ( ) ) { sbData . append ( "*FIELDS\r\n" ) ; sbData . append ( "@L ID_FIELD WSTA.... FLSA FLOB FLDT FLDD FLDS FLST SLTX SLDP ID_SOIL FLNAME\r\n" ) ; eventPart2 = new StringBuilder ( ) ; eventPart2 . append ( "@L ...........XCRD ...........YCRD .....ELEV .............AREA .SLEN .FLWR .SLAS FLHST FHDUR\r\n" ) ; } else { sbError . append ( "! Warning: There is no field data in the experiment.\r\n" ) ; } for ( int idx = 0 ; idx < flArr . size ( ) ; idx ++ ) { HashMap secData = flArr . get ( idx ) ; // Check if the necessary is missing
if ( getObjectOr ( secData , "wst_id" , "" ) . equals ( "" ) ) { sbError . append ( "! Warning: Incompleted record because missing data : [wst_id]\r\n" ) ; } String soil_id = getValueOr ( secData , "soil_id" , defValC ) ; if ( soil_id . equals ( "" ) ) { sbError . append ( "! Warning: Incompleted record because missing data : [soil_id]\r\n" ) ; } else if ( soil_id . length ( ) > 10 ) { sbError . append ( "! Warning: Oversized data : [soil_id] " ) . append ( soil_id ) . append ( "\r\n" ) ; } sbData . append ( String . format ( "%1$2s %2$-8s %3$-8s %4$5s %5$5s %6$-5s %7$5s %8$5s %9$-5s %10$-5s%11$5s %12$-10s %13$s\r\n" , // P . S . change length definition to match current way
idx + 1 , formatStr ( 8 , secData , "id_field" , defValC ) , formatStr ( 8 , secData , "wst_id" , defValC ) , formatStr ( 4 , secData , "flsl" , defValC ) , formatNumStr ( 5 , secData , "flob" , defValR ) , formatStr ( 5 , secData , "fl_drntype" , defValC ) , formatNumStr ( 5 , secData , "fldrd" , defValR ) , formatNumStr ( 5 , secData , "fldrs" , defValR ) , formatStr ( 5 , secData , "flst" , defValC ) , formatStr ( 5 , transSltx ( getValueOr ( secData , "sltx" , defValC ) ) , "sltx" ) , formatNumStr ( 5 , secData , "sldp" , defValR ) , soil_id , getValueOr ( secData , "fl_name" , defValC ) ) ) ; eventPart2 . append ( String . format ( "%1$2s %2$15s %3$15s %4$9s %5$17s %6$5s %7$5s %8$5s %9$5s %10$5s\r\n" , idx + 1 , formatNumStr ( 15 , secData , "fl_long" , defValR ) , formatNumStr ( 15 , secData , "fl_lat" , defValR ) , formatNumStr ( 9 , secData , "flele" , defValR ) , formatNumStr ( 17 , secData , "farea" , defValR ) , "-99" , // P . S . SLEN keeps - 99
formatNumStr ( 5 , secData , "fllwr" , defValR ) , formatNumStr ( 5 , secData , "flsla" , defValR ) , formatStr ( 5 , secData , "flhst" , defValC ) , formatNumStr ( 5 , secData , "fhdur" , defValR ) ) ) ; } if ( ! flArr . isEmpty ( ) ) { sbData . append ( eventPart2 . toString ( ) ) . append ( "\r\n" ) ; } // SOIL ANALYSIS Section
if ( ! saArr . isEmpty ( ) ) { sbData . append ( "*SOIL ANALYSIS\r\n" ) ; for ( int idx = 0 ; idx < saArr . size ( ) ; idx ++ ) { HashMap secData = ( HashMap ) saArr . get ( idx ) ; sbData . append ( "@A SADAT SMHB SMPX SMKE SANAME\r\n" ) ; sbData . append ( String . format ( "%1$2s %2$5s %3$5s %4$5s %5$5s %6$s\r\n" , idx + 1 , formatDateStr ( getValueOr ( secData , "sadat" , defValD ) ) , getValueOr ( secData , "samhb" , defValC ) , getValueOr ( secData , "sampx" , defValC ) , getValueOr ( secData , "samke" , defValC ) , getValueOr ( secData , "sa_name" , defValC ) ) ) ; ArrayList < HashMap > subDataArr = getObjectOr ( secData , "soilLayer" , new ArrayList ( ) ) ; if ( ! subDataArr . isEmpty ( ) ) { sbData . append ( "@A SABL SADM SAOC SANI SAPHW SAPHB SAPX SAKE SASC\r\n" ) ; } for ( HashMap subData : subDataArr ) { sbData . append ( String . format ( "%1$2s %2$5s %3$5s %4$5s %5$5s %6$5s %7$5s %8$5s %9$5s %10$5s\r\n" , idx + 1 , formatNumStr ( 5 , subData , "sabl" , defValR ) , formatNumStr ( 5 , subData , "sabdm" , defValR ) , formatNumStr ( 5 , subData , "saoc" , defValR ) , formatNumStr ( 5 , subData , "sani" , defValR ) , formatNumStr ( 5 , subData , "saphw" , defValR ) , formatNumStr ( 5 , subData , "saphb" , defValR ) , formatNumStr ( 5 , subData , "sapx" , defValR ) , formatNumStr ( 5 , subData , "sake" , defValR ) , formatNumStr ( 5 , subData , "sasc" , defValR ) ) ) ; } } sbData . append ( "\r\n" ) ; } // INITIAL CONDITIONS Section
if ( ! icArr . isEmpty ( ) ) { sbData . append ( "*INITIAL CONDITIONS\r\n" ) ; for ( int idx = 0 ; idx < icArr . size ( ) ; idx ++ ) { HashMap secData = icArr . get ( idx ) ; String brokenMark = "" ; if ( getValueOr ( secData , "icdat" , defValD ) . equals ( defValD ) ) { brokenMark = "!" ; } sbData . append ( brokenMark ) . append ( "@C PCR ICDAT ICRT ICND ICRN ICRE ICWD ICRES ICREN ICREP ICRIP ICRID ICNAME\r\n" ) ; sbData . append ( brokenMark ) . append ( String . format ( "%1$2s %2$5s %3$5s %4$5s %5$5s %6$5s %7$5s %8$5s %9$5s %10$5s %11$5s %12$5s %13$5s %14$s\r\n" , idx + 1 , translateTo2BitCrid ( secData , "icpcr" , defValC ) , formatDateStr ( getValueOr ( secData , "icdat" , getPdate ( result ) ) ) , formatNumStr ( 5 , secData , "icrt" , defValR ) , formatNumStr ( 5 , secData , "icnd" , defValR ) , formatNumStr ( 5 , secData , "icrz#" , defValR ) , formatNumStr ( 5 , secData , "icrze" , defValR ) , formatNumStr ( 5 , secData , "icwt" , defValR ) , formatNumStr ( 5 , secData , "icrag" , defValR ) , formatNumStr ( 5 , secData , "icrn" , defValR ) , formatNumStr ( 5 , secData , "icrp" , defValR ) , formatNumStr ( 5 , secData , "icrip" , defValR ) , formatNumStr ( 5 , secData , "icrdp" , defValR ) , getValueOr ( secData , "ic_name" , defValC ) ) ) ; ArrayList < HashMap > subDataArr = getObjectOr ( secData , "soilLayer" , new ArrayList ( ) ) ; if ( ! subDataArr . isEmpty ( ) ) { sbData . append ( brokenMark ) . append ( "@C ICBL SH2O SNH4 SNO3\r\n" ) ; } for ( HashMap subData : subDataArr ) { sbData . append ( brokenMark ) . append ( String . format ( "%1$2s %2$5s %3$5s %4$5s %5$5s\r\n" , idx + 1 , formatNumStr ( 5 , subData , "icbl" , defValR ) , formatNumStr ( 5 , subData , "ich2o" , defValR ) , formatNumStr ( 5 , subData , "icnh4" , defValR ) , formatNumStr ( 5 , subData , "icno3" , defValR ) ) ) ; } } sbData . append ( "\r\n" ) ; } // PLANTING DETAILS Section
if ( ! mpArr . isEmpty ( ) ) { sbData . append ( "*PLANTING DETAILS\r\n" ) ; sbData . append ( "@P PDATE EDATE PPOP PPOE PLME PLDS PLRS PLRD PLDP PLWT PAGE PENV PLPH SPRL PLNAME\r\n" ) ; for ( int idx = 0 ; idx < mpArr . size ( ) ; idx ++ ) { HashMap secData = mpArr . get ( idx ) ; // Check if necessary data is missing
String pdate = getValueOr ( secData , "date" , "" ) ; if ( pdate . equals ( "" ) ) { sbError . append ( "! Warning: Incompleted record because missing data : [pdate]\r\n" ) ; } else if ( formatDateStr ( pdate ) . equals ( defValD ) ) { sbError . append ( "! Warning: Incompleted record because variable [pdate] with invalid value [" ) . append ( pdate ) . append ( "]\r\n" ) ; } if ( getValueOr ( secData , "plpop" , getValueOr ( secData , "plpoe" , "" ) ) . equals ( "" ) ) { sbError . append ( "! Warning: Incompleted record because missing data : [plpop] and [plpoe]\r\n" ) ; } if ( getValueOr ( secData , "plrs" , "" ) . equals ( "" ) ) { sbError . append ( "! Warning: Incompleted record because missing data : [plrs]\r\n" ) ; } // if ( getValueOr ( secData , " plma " , " " ) . equals ( " " ) ) {
// sbError . append ( " ! Warning : missing data : [ plma ] , and will automatically use default value ' S ' \ r \ n " ) ;
// if ( getValueOr ( secData , " plds " , " " ) . equals ( " " ) ) {
// sbError . append ( " ! Warning : missing data : [ plds ] , and will automatically use default value ' R ' \ r \ n " ) ;
// if ( getValueOr ( secData , " pldp " , " " ) . equals ( " " ) ) {
// sbError . append ( " ! Warning : missing data : [ pldp ] , and will automatically use default value ' 7 ' \ r \ n " ) ;
// mm - > cm
String pldp = getValueOr ( secData , "pldp" , "" ) ; if ( ! pldp . equals ( "" ) ) { try { BigDecimal pldpBD = new BigDecimal ( pldp ) ; pldpBD = pldpBD . divide ( new BigDecimal ( "10" ) ) ; secData . put ( "pldp" , pldpBD . toString ( ) ) ; } catch ( NumberFormatException e ) { } } sbData . append ( String . format ( "%1$2s %2$5s %3$5s %4$5s %5$5s %6$5s %7$5s %8$5s %9$5s %10$5s %11$5s %12$5s %13$5s %14$5s %15$5s %16$s\r\n" , idx + 1 , formatDateStr ( getValueOr ( secData , "date" , defValD ) ) , formatDateStr ( getValueOr ( secData , "edate" , defValD ) ) , formatNumStr ( 5 , secData , "plpop" , getValueOr ( secData , "plpoe" , defValR ) ) , formatNumStr ( 5 , secData , "plpoe" , getValueOr ( secData , "plpop" , defValR ) ) , getValueOr ( secData , "plma" , defValC ) , // P . S . Set default value as " S " ( Cancelled )
getValueOr ( secData , "plds" , defValC ) , // P . S . Set default value as " R " ( Cancelled )
formatNumStr ( 5 , secData , "plrs" , defValR ) , formatNumStr ( 5 , secData , "plrd" , defValR ) , formatNumStr ( 5 , secData , "pldp" , defValR ) , // P . S . Set default value as " 7 " ( Cancelled )
formatNumStr ( 5 , secData , "plmwt" , defValR ) , formatNumStr ( 5 , secData , "page" , defValR ) , formatNumStr ( 5 , secData , "plenv" , defValR ) , formatNumStr ( 5 , secData , "plph" , defValR ) , formatNumStr ( 5 , secData , "plspl" , defValR ) , getValueOr ( secData , "pl_name" , defValC ) ) ) ; } sbData . append ( "\r\n" ) ; } else if ( ! isFallow ) { sbError . append ( "! Warning: There is no plainting data in the experiment.\r\n" ) ; } // IRRIGATION AND WATER MANAGEMENT Section
if ( ! miArr . isEmpty ( ) ) { sbData . append ( "*IRRIGATION AND WATER MANAGEMENT\r\n" ) ; for ( int idx = 0 ; idx < miArr . size ( ) ; idx ++ ) { // secData = ( ArrayList ) miArr . get ( idx ) ;
ArrayList < HashMap > subDataArr = miArr . get ( idx ) ; HashMap subData ; if ( ! subDataArr . isEmpty ( ) ) { subData = subDataArr . get ( 0 ) ; } else { subData = new HashMap ( ) ; } sbData . append ( "@I EFIR IDEP ITHR IEPT IOFF IAME IAMT IRNAME\r\n" ) ; sbData . append ( String . format ( "%1$2s %2$5s %3$5s %4$5s %5$5s %6$5s %7$5s %8$5s %9$s\r\n" , idx + 1 , formatNumStr ( 5 , subData , "ireff" , defValR ) , formatNumStr ( 5 , subData , "irmdp" , defValR ) , formatNumStr ( 5 , subData , "irthr" , defValR ) , formatNumStr ( 5 , subData , "irept" , defValR ) , getValueOr ( subData , "irstg" , defValC ) , getValueOr ( subData , "iame" , defValC ) , formatNumStr ( 5 , subData , "iamt" , defValR ) , getValueOr ( subData , "ir_name" , defValC ) ) ) ; if ( ! subDataArr . isEmpty ( ) ) { sbData . append ( "@I IDATE IROP IRVAL\r\n" ) ; } for ( HashMap subDataArr1 : subDataArr ) { subData = subDataArr1 ; String brokenMark = "" ; if ( getValueOr ( subData , "date" , defValD ) . equals ( defValD ) ) { brokenMark = "!" ; } sbData . append ( brokenMark ) . append ( String . format ( "%1$2s %2$5s %3$-5s %4$5s\r\n" , idx + 1 , formatDateStr ( getValueOr ( subData , "date" , defValD ) ) , // P . S . idate - > date
getValueOr ( subData , "irop" , defValC ) , formatNumStr ( 5 , subData , "irval" , defValR ) ) ) ; } } sbData . append ( "\r\n" ) ; } // FERTILIZERS ( INORGANIC ) Section
if ( ! mfArr . isEmpty ( ) ) { sbData . append ( "*FERTILIZERS (INORGANIC)\r\n" ) ; sbData . append ( "@F FDATE FMCD FACD FDEP FAMN FAMP FAMK FAMC FAMO FOCD FERNAME\r\n" ) ; // String fen _ tot = getValueOr ( result , " fen _ tot " , defValR ) ;
// String fep _ tot = getValueOr ( result , " fep _ tot " , defValR ) ;
// String fek _ tot = getValueOr ( result , " fek _ tot " , defValR ) ;
// String pdate = getPdate ( result ) ;
// if ( pdate . equals ( " " ) ) {
// pdate = defValD ;
for ( int idx = 0 ; idx < mfArr . size ( ) ; idx ++ ) { ArrayList < HashMap > secDataArr = mfArr . get ( idx ) ; for ( HashMap secData : secDataArr ) { // if ( getValueOr ( secData , " fdate " , " " ) . equals ( " " ) ) {
// sbError . append ( " ! Warning : missing data : [ fdate ] , and will automatically use planting value ' " ) . append ( pdate ) . append ( " ' \ r \ n " ) ;
// if ( getValueOr ( secData , " fecd " , " " ) . equals ( " " ) ) {
// sbError . append ( " ! Warning : missing data : [ fecd ] , and will automatically use default value ' FE001 ' \ r \ n " ) ;
// if ( getValueOr ( secData , " feacd " , " " ) . equals ( " " ) ) {
// sbError . append ( " ! Warning : missing data : [ feacd ] , and will automatically use default value ' AP002 ' \ r \ n " ) ;
// if ( getValueOr ( secData , " fedep " , " " ) . equals ( " " ) ) {
// sbError . append ( " ! Warning : missing data : [ fedep ] , and will automatically use default value ' 10 ' \ r \ n " ) ;
// if ( getValueOr ( secData , " feamn " , " " ) . equals ( " " ) ) {
// sbError . append ( " ! Warning : missing data : [ feamn ] , and will automatically use the value of FEN _ TOT , ' " ) . append ( fen _ tot ) . append ( " ' \ r \ n " ) ;
// if ( getValueOr ( secData , " feamp " , " " ) . equals ( " " ) ) {
// sbError . append ( " ! Warning : missing data : [ feamp ] , and will automatically use the value of FEP _ TOT , ' " ) . append ( fep _ tot ) . append ( " ' \ r \ n " ) ;
// if ( getValueOr ( secData , " feamk " , " " ) . equals ( " " ) ) {
// sbError . append ( " ! Warning : missing data : [ feamk ] , and will automatically use the value of FEK _ TOT , ' " ) . append ( fek _ tot ) . append ( " ' \ r \ n " ) ;
String brokenMark = "" ; if ( getValueOr ( secData , "date" , defValD ) . equals ( defValD ) ) { brokenMark = "!" ; } sbData . append ( brokenMark ) . append ( String . format ( "%1$2s %2$5s %3$5s %4$5s %5$5s %6$5s %7$5s %8$5s %9$5s %10$5s %11$5s %12$s\r\n" , idx + 1 , formatDateStr ( getValueOr ( secData , "date" , defValD ) ) , // P . S . fdate - > date
getValueOr ( secData , "fecd" , defValC ) , // P . S . Set default value as " FE005 " ( Cancelled )
getValueOr ( secData , "feacd" , defValC ) , // P . S . Set default value as " AP002 " ( Cancelled )
formatNumStr ( 5 , secData , "fedep" , defValR ) , // P . S . Set default value as " 10 " ( Cancelled )
formatNumStr ( 5 , secData , "feamn" , "0" ) , // P . S . Set default value to use 0 instead of - 99
formatNumStr ( 5 , secData , "feamp" , defValR ) , // P . S . Set default value to use the value of FEP _ TOT in meta data ( Cancelled )
formatNumStr ( 5 , secData , "feamk" , defValR ) , // P . S . Set default value to use the value of FEK _ TOT in meta data ( Cancelled )
formatNumStr ( 5 , secData , "feamc" , defValR ) , formatNumStr ( 5 , secData , "feamo" , defValR ) , getValueOr ( secData , "feocd" , defValC ) , getValueOr ( secData , "fe_name" , defValC ) ) ) ; } } sbData . append ( "\r\n" ) ; } // RESIDUES AND ORGANIC FERTILIZER Section
if ( ! mrArr . isEmpty ( ) ) { sbData . append ( "*RESIDUES AND ORGANIC FERTILIZER\r\n" ) ; sbData . append ( "@R RDATE RCOD RAMT RESN RESP RESK RINP RDEP RMET RENAME\r\n" ) ; for ( int idx = 0 ; idx < mrArr . size ( ) ; idx ++ ) { ArrayList < HashMap > secDataArr = mrArr . get ( idx ) ; for ( HashMap secData : secDataArr ) { String brokenMark = "" ; if ( getValueOr ( secData , "date" , defValD ) . equals ( defValD ) ) { brokenMark = "!" ; } sbData . append ( brokenMark ) . append ( String . format ( "%1$2s %2$5s %3$-5s %4$5s %5$5s %6$5s %7$5s %8$5s %9$5s %10$5s %11$s\r\n" , idx + 1 , formatDateStr ( getValueOr ( secData , "date" , defValD ) ) , // P . S . omdat - > date
getValueOr ( secData , "omcd" , defValC ) , formatNumStr ( 5 , secData , "omamt" , defValR ) , formatNumStr ( 5 , secData , "omn%" , defValR ) , formatNumStr ( 5 , secData , "omp%" , defValR ) , formatNumStr ( 5 , secData , "omk%" , defValR ) , formatNumStr ( 5 , secData , "ominp" , defValR ) , formatNumStr ( 5 , secData , "omdep" , defValR ) , formatNumStr ( 5 , secData , "omacd" , defValR ) , getValueOr ( secData , "om_name" , defValC ) ) ) ; } } sbData . append ( "\r\n" ) ; } // CHEMICAL APPLICATIONS Section
if ( ! mcArr . isEmpty ( ) ) { sbData . append ( "*CHEMICAL APPLICATIONS\r\n" ) ; sbData . append ( "@C CDATE CHCOD CHAMT CHME CHDEP CHT..CHNAME\r\n" ) ; for ( int idx = 0 ; idx < mcArr . size ( ) ; idx ++ ) { ArrayList < HashMap > secDataArr = mcArr . get ( idx ) ; for ( HashMap secData : secDataArr ) { String brokenMark = "" ; if ( getValueOr ( secData , "date" , defValD ) . equals ( defValD ) ) { brokenMark = "!" ; } sbData . append ( brokenMark ) . append ( String . format ( "%1$2s %2$5s %3$5s %4$5s %5$5s %6$5s %7$5s %8$s\r\n" , idx + 1 , formatDateStr ( getValueOr ( secData , "date" , defValD ) ) , // P . S . cdate - > date
getValueOr ( secData , "chcd" , defValC ) , formatNumStr ( 5 , secData , "chamt" , defValR ) , getValueOr ( secData , "chacd" , defValC ) , getValueOr ( secData , "chdep" , defValC ) , getValueOr ( secData , "ch_targets" , defValC ) , getValueOr ( secData , "ch_name" , defValC ) ) ) ; } } sbData . append ( "\r\n" ) ; } // TILLAGE Section
if ( ! mtArr . isEmpty ( ) ) { sbData . append ( "*TILLAGE AND ROTATIONS\r\n" ) ; sbData . append ( "@T TDATE TIMPL TDEP TNAME\r\n" ) ; for ( int idx = 0 ; idx < mtArr . size ( ) ; idx ++ ) { ArrayList < HashMap > secDataArr = mtArr . get ( idx ) ; for ( HashMap secData : secDataArr ) { String brokenMark = "" ; if ( getValueOr ( secData , "date" , defValD ) . equals ( defValD ) ) { brokenMark = "!" ; } sbData . append ( brokenMark ) . append ( String . format ( "%1$2s %2$5s %3$5s %4$5s %5$s\r\n" , idx + 1 , formatDateStr ( getValueOr ( secData , "date" , defValD ) ) , // P . S . tdate - > date
getValueOr ( secData , "tiimp" , defValC ) , formatNumStr ( 5 , secData , "tidep" , defValR ) , getValueOr ( secData , "ti_name" , defValC ) ) ) ; } } sbData . append ( "\r\n" ) ; } // ENVIRONMENT MODIFICATIONS Section
if ( ! meArr . isEmpty ( ) ) { sbData . append ( "*ENVIRONMENT MODIFICATIONS\r\n" ) ; sbData . append ( "@E ODATE EDAY ERAD EMAX EMIN ERAIN ECO2 EDEW EWIND ENVNAME\r\n" ) ; for ( int idx = 0 , cnt = 1 ; idx < meArr . size ( ) ; idx ++ ) { ArrayList < HashMap > secDataArr = meArr . get ( idx ) ; for ( HashMap secData : secDataArr ) { if ( secData . containsKey ( "em_data" ) ) { sbData . append ( String . format ( "%1$2s %2$s\r\n" , cnt , getValueOr ( secData , "em_data" , "" ) . trim ( ) ) ) ; } else { String brokenMark = "" ; if ( getValueOr ( secData , "date" , defValD ) . equals ( defValD ) ) { brokenMark = "!" ; } sbData . append ( brokenMark ) . append ( String . format ( "%1$2s %2$5s %3$-1s%4$4s %5$-1s%6$4s %7$-1s%8$4s %9$-1s%10$4s %11$-1s%12$4s %13$-1s%14$4s %15$-1s%16$4s %17$-1s%18$4s %19$s\r\n" , idx + 1 , formatDateStr ( getValueOr ( secData , "date" , defValD ) ) , // P . S . emday - > date
getValueOr ( secData , "ecdyl" , "A" ) , formatNumStr ( 4 , secData , "emdyl" , "0" ) , getValueOr ( secData , "ecrad" , "A" ) , formatNumStr ( 4 , secData , "emrad" , "0" ) , getValueOr ( secData , "ecmax" , "A" ) , formatNumStr ( 4 , secData , "emmax" , "0" ) , getValueOr ( secData , "ecmin" , "A" ) , formatNumStr ( 4 , secData , "emmin" , "0" ) , getValueOr ( secData , "ecrai" , "A" ) , formatNumStr ( 4 , secData , "emrai" , "0" ) , getValueOr ( secData , "ecco2" , "A" ) , formatNumStr ( 4 , secData , "emco2" , "0" ) , getValueOr ( secData , "ecdew" , "A" ) , formatNumStr ( 4 , secData , "emdew" , "0" ) , getValueOr ( secData , "ecwnd" , "A" ) , formatNumStr ( 4 , secData , "emwnd" , "0" ) , getValueOr ( secData , "em_name" , defValC ) ) ) ; } } } sbData . append ( "\r\n" ) ; } // HARVEST DETAILS Section
if ( ! mhArr . isEmpty ( ) ) { sbData . append ( "*HARVEST DETAILS\r\n" ) ; sbData . append ( "@H HDATE HSTG HCOM HSIZE HPC HBPC HNAME\r\n" ) ; for ( int idx = 0 ; idx < mhArr . size ( ) ; idx ++ ) { ArrayList < HashMap > secDataArr = mhArr . get ( idx ) ; for ( HashMap secData : secDataArr ) { String brokenMark = "" ; if ( getValueOr ( secData , "date" , defValD ) . equals ( defValD ) ) { brokenMark = "!" ; } sbData . append ( brokenMark ) . append ( String . format ( "%1$2s %2$5s %3$-5s %4$-5s %5$-5s %6$5s %7$5s %8$s\r\n" , idx + 1 , formatDateStr ( getValueOr ( secData , "date" , defValD ) ) , // P . S . hdate - > date
getValueOr ( secData , "hastg" , defValC ) , getValueOr ( secData , "hacom" , defValC ) , getValueOr ( secData , "hasiz" , defValC ) , formatNumStr ( 5 , secData , "hap%" , defValR ) , formatNumStr ( 5 , secData , "hab%" , defValR ) , getValueOr ( secData , "ha_name" , defValC ) ) ) ; } } sbData . append ( "\r\n" ) ; } // SIMULATION CONTROLS and AUTOMATIC MANAGEMENT Section
if ( ! smArr . isEmpty ( ) ) { // Loop all the simulation control records
sbData . append ( "*SIMULATION CONTROLS\r\n" ) ; for ( int idx = 0 ; idx < smArr . size ( ) ; idx ++ ) { HashMap secData = smArr . get ( idx ) ; sbData . append ( createSMMAStr ( idx + 1 , secData ) ) ; } } else { sbData . append ( "*SIMULATION CONTROLS\r\n" ) ; sbData . append ( createSMMAStr ( 1 , new HashMap ( ) ) ) ; } // DOME Info Section
if ( isAnyDomeApplied ) { sbDomeData . append ( "! APPLIED DOME INFO\r\n" ) ; for ( String exname : appliedDomes . keySet ( ) ) { if ( ! getValueOr ( appliedDomes , exname , "" ) . equals ( "" ) ) { sbDomeData . append ( "! " ) . append ( exname ) . append ( "\t" ) ; sbDomeData . append ( appliedDomes . get ( exname ) ) ; sbDomeData . append ( "\r\n" ) ; } } sbDomeData . append ( "\r\n" ) ; } // Output finish
bwX . write ( sbError . toString ( ) ) ; bwX . write ( sbDomeData . toString ( ) ) ; bwX . write ( sbGenData . toString ( ) ) ; bwX . write ( sbNotesData . toString ( ) ) ; bwX . write ( sbData . toString ( ) ) ; bwX . close ( ) ; } catch ( IOException e ) { LOG . error ( DssatCommonOutput . getStackTrace ( e ) ) ; } |
public class CommerceAddressPersistenceImpl { /** * Returns the commerce addresses before and after the current commerce address in the ordered set where groupId = & # 63 ; and classNameId = & # 63 ; and classPK = & # 63 ; .
* @ param commerceAddressId the primary key of the current commerce address
* @ param groupId the group ID
* @ param classNameId the class name ID
* @ param classPK the class pk
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the previous , current , and next commerce address
* @ throws NoSuchAddressException if a commerce address with the primary key could not be found */
@ Override public CommerceAddress [ ] findByG_C_C_PrevAndNext ( long commerceAddressId , long groupId , long classNameId , long classPK , OrderByComparator < CommerceAddress > orderByComparator ) throws NoSuchAddressException { } } | CommerceAddress commerceAddress = findByPrimaryKey ( commerceAddressId ) ; Session session = null ; try { session = openSession ( ) ; CommerceAddress [ ] array = new CommerceAddressImpl [ 3 ] ; array [ 0 ] = getByG_C_C_PrevAndNext ( session , commerceAddress , groupId , classNameId , classPK , orderByComparator , true ) ; array [ 1 ] = commerceAddress ; array [ 2 ] = getByG_C_C_PrevAndNext ( session , commerceAddress , groupId , classNameId , classPK , orderByComparator , false ) ; return array ; } catch ( Exception e ) { throw processException ( e ) ; } finally { closeSession ( session ) ; } |
public class JSPUtil { /** * Method used to generate the txt based Job table for Job pages .
* @ param jobs vector of jobs to be displayed in table .
* @ param colSeparator the char used to separate columns
* @ param rowSeparator the char used to separate records
* @ return a String contains the table
* @ throws IOException */
public static String generateTxtJobTable ( Collection < JobInProgress > jobs , JobTracker tracker ) throws IOException { } } | char colSeparator = '\t' ; char rowSeparator = '\n' ; StringBuffer sb = new StringBuffer ( ) ; sb . append ( "01.JOBID" + colSeparator + "02.START" + colSeparator + "03.FINISH" + colSeparator + "04.USER" + colSeparator + "05.NAME" + colSeparator + "06.BLACK_TT" + colSeparator + "07.PRIORITY" + colSeparator + "08.MAP_TOTAL" + colSeparator + "09.MAP_COMPLETE" + colSeparator + "10.MAP_RUN" + colSeparator + "11.MAP_SPECU" + colSeparator + "12.MAP_NONLOC" + colSeparator + "13.MAP_KILLED" + colSeparator + "14.MAP_FAILED" + colSeparator + "15.RED_TOTAL" + colSeparator + "16.RED_COMPLETE" + colSeparator + "17.RED_RUN" + colSeparator + "18.RED_SPECU" + colSeparator + "19.RED_KILLED" + colSeparator + "20.RED_FAILED" + colSeparator + "21.%MEM" + colSeparator + "22.%MEM_MAX" + colSeparator + "23.%MEM_PEAK" + colSeparator + "24.MEM_MS" + colSeparator + "25.%CPU" + colSeparator + "26.%CPU_MAX" + colSeparator + "27.CPU_MS" + rowSeparator ) ; if ( jobs . size ( ) > 0 ) { for ( Iterator < JobInProgress > it = jobs . iterator ( ) ; it . hasNext ( ) ; ) { JobInProgress job = it . next ( ) ; JobProfile profile = job . getProfile ( ) ; String user = profile . getUser ( ) ; String name = profile . getJobName ( ) . replace ( ' ' , '_' ) . replace ( '\t' , '_' ) . replace ( '\n' , '_' ) ; int desiredMaps = job . desiredMaps ( ) ; int desiredReduces = job . desiredReduces ( ) ; int runningMaps = 0 ; int failedMaps = 0 ; int killedMaps = 0 ; for ( TaskInProgress tip : job . getTasks ( TaskType . MAP ) ) { if ( tip . isRunning ( ) ) { runningMaps += tip . getActiveTasks ( ) . size ( ) ; tip . numKilledTasks ( ) ; failedMaps += tip . numTaskFailures ( ) ; killedMaps += tip . numKilledTasks ( ) ; } } int runningReduces = 0 ; int failedReduces = 0 ; int killedReduces = 0 ; for ( TaskInProgress tip : job . getTasks ( TaskType . REDUCE ) ) { if ( tip . 
isRunning ( ) ) { runningReduces += tip . getActiveTasks ( ) . size ( ) ; failedReduces += tip . numTaskFailures ( ) ; killedReduces += tip . numKilledTasks ( ) ; } } int completedMaps = job . finishedMaps ( ) ; int completedReduces = job . finishedReduces ( ) ; int nonLocalRunningMaps = job . getNonLocalRunningMaps ( ) . size ( ) ; long submitTime = job . getStartTime ( ) ; long finishTime = job . getFinishTime ( ) ; String jobpri = job . getPriority ( ) . toString ( ) ; JobID jobId = job . getJobID ( ) ; double mem = 0 , memMax = 0 , memMaxPeak = 0 , memCost = 0 ; double cpu = 0 , cpuMax = 0 , cpuCost = 0 ; ResourceReporter reporter = tracker . getResourceReporter ( ) ; if ( reporter != null ) { mem = reporter . getJobCpuPercentageOnCluster ( jobId ) ; memMax = reporter . getJobMemMaxPercentageOnBox ( jobId ) ; memMaxPeak = reporter . getJobMemMaxPercentageOnBoxAllTime ( jobId ) ; memCost = reporter . getJobMemCumulatedUsageTime ( jobId ) ; cpu = reporter . getJobCpuPercentageOnCluster ( jobId ) ; cpuMax = reporter . getJobCpuMaxPercentageOnBox ( jobId ) ; cpuCost = reporter . getJobCpuCumulatedUsageTime ( jobId ) ; } sb . append ( jobId . toString ( ) + colSeparator + submitTime + colSeparator + finishTime + colSeparator + user + colSeparator + name + colSeparator + job . getNoOfBlackListedTrackers ( ) + colSeparator + jobpri + colSeparator + desiredMaps + colSeparator + completedMaps + colSeparator + runningMaps + colSeparator + job . speculativeMapTasks + colSeparator + nonLocalRunningMaps + colSeparator + killedMaps + colSeparator + failedMaps + colSeparator + desiredReduces + colSeparator + completedReduces + colSeparator + runningReduces + colSeparator + job . speculativeReduceTasks + colSeparator + killedReduces + colSeparator + failedReduces + colSeparator + mem + colSeparator + memMax + colSeparator + memMaxPeak + colSeparator + memCost + colSeparator + cpu + colSeparator + cpuMax + colSeparator + cpuCost + rowSeparator ) ; } } return sb . 
toString ( ) ; |
public class SignatureRequest { /** * Utility method that allows you to search for a Signature object on this
* request by email and name . It requires both because neither alone is
* enough to guarantee uniqueness ( some requests can have multiple signers
* using the same email address or name ) .
* @ param email String
* @ param name String
* @ return Signature , if found on this request , or null
* @ deprecated Use getSignature ( email , name ) */
public Signature getSignatureBySigner ( String email , String name ) { } } | if ( email == null || name == null ) { return null ; } for ( Signature s : getSignatures ( ) ) { if ( name . equalsIgnoreCase ( s . getName ( ) ) && email . equalsIgnoreCase ( s . getEmail ( ) ) ) { return s ; } } return null ; |
public class AbstractStream { @ Override public < R > Stream < R > flattMap ( final Function < ? super T , ? extends Collection < ? extends R > > mapper ) { } } | return flatMap ( new Function < T , Stream < ? extends R > > ( ) { @ Override public Stream < ? extends R > apply ( T t ) { return Stream . of ( mapper . apply ( t ) ) ; } } ) ; |
public class CommerceTierPriceEntryUtil { /** * Returns the first commerce tier price entry in the ordered set where companyId = & # 63 ; .
* @ param companyId the company ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching commerce tier price entry , or < code > null < / code > if a matching commerce tier price entry could not be found */
public static CommerceTierPriceEntry fetchByCompanyId_First ( long companyId , OrderByComparator < CommerceTierPriceEntry > orderByComparator ) { } } | return getPersistence ( ) . fetchByCompanyId_First ( companyId , orderByComparator ) ; |
public class SessionAffinityManagerImpl { /** * Method used to get at the SSL Id rather than the displayed Id */
public String getActualSSLSessionId ( ServletRequest request ) { } } | String sessionID = null ; byte [ ] sslBytes = ( ( IExtendedRequest ) request ) . getSSLId ( ) ; if ( sslBytes != null ) { sessionID = SessionIDGeneratorImpl . convertSessionIdBytesToSessionId ( sslBytes , SessionManagerConfig . getSessionIDLength ( ) ) ; } return sessionID ; |
public class ArrayLabelSetterFactory { /** * フィールドによるラベル情報を格納する場合 。
* < p > { @ code < フィールド名 > + Label } のメソッド名 < / p >
* @ param beanClass フィールドが定義してあるクラスのインスタンス
* @ param fieldName フィールド名
* @ return ラベル情報の設定用クラス */
private Optional < ArrayLabelSetter > createField ( final Class < ? > beanClass , final String fieldName ) { } } | final String labelFieldName = fieldName + "Label" ; final Field labelField ; try { labelField = beanClass . getDeclaredField ( labelFieldName ) ; labelField . setAccessible ( true ) ; } catch ( NoSuchFieldException | SecurityException e ) { return Optional . empty ( ) ; } if ( ! List . class . isAssignableFrom ( labelField . getType ( ) ) ) { return Optional . empty ( ) ; } final ParameterizedType type = ( ParameterizedType ) labelField . getGenericType ( ) ; final Class < ? > valueType = ( Class < ? > ) type . getActualTypeArguments ( ) [ 0 ] ; if ( valueType . equals ( String . class ) ) { return Optional . of ( new ArrayLabelSetter ( ) { @ SuppressWarnings ( "unchecked" ) @ Override public void set ( final Object beanObj , final String label , final int index ) { ArgUtils . notNull ( beanObj , "beanObj" ) ; ArgUtils . notEmpty ( label , "label" ) ; try { List < String > labelListObj = ( List < String > ) labelField . get ( beanObj ) ; if ( labelListObj == null ) { labelListObj = new ArrayList < > ( ) ; labelField . set ( beanObj , labelListObj ) ; } Utils . addListWithIndex ( labelListObj , label , index ) ; } catch ( IllegalArgumentException | IllegalAccessException e ) { throw new RuntimeException ( "fail access label field." , e ) ; } } } ) ; } return Optional . empty ( ) ; |
public class GetFederationTokenRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( GetFederationTokenRequest getFederationTokenRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( getFederationTokenRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getFederationTokenRequest . getInstanceId ( ) , INSTANCEID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class StepMacro { /** * capturing group from the macroMatcher */
private String replaceGroupsInMacroStep ( Matcher macroMatcher , String action ) { } } | Matcher groupMatcher = groupPattern . matcher ( action ) ; while ( groupMatcher . find ( ) ) { String match = groupMatcher . group ( ) ; String groupString = match . substring ( 2 , match . length ( ) - 1 ) ; int groupId = Integer . parseInt ( groupString ) ; if ( groupId > macroMatcher . groupCount ( ) ) { throw new ChorusException ( "Capture group with index " + groupId + " in StepMacro step '" + action + "' did not have a matching capture group in the pattern '" + pattern . toString ( ) + "'" ) ; } String replacement = macroMatcher . group ( groupId ) ; replacement = RegexpUtils . escapeRegexReplacement ( replacement ) ; log . trace ( "Replacing group " + match + " with " + replacement + " in action " + action ) ; action = action . replaceFirst ( "<\\$" + groupId + ">" , replacement ) ; } return action ; |
public class Ci_HelpTable { /** * Processes general help with no specific command requested .
* @ param at table to add help information to */
protected void generalHelp ( AsciiTable at ) { } } | // collect all commands belonging to a particular category
String defKey = "__standard" ; Map < String , TreeMap < String , SkbShellCommand > > cat2Cmd = new TreeMap < > ( ) ; for ( CommandInterpreter ci : this . skbShell . getCommandMap ( ) . values ( ) ) { for ( SkbShellCommand ssc : ci . getCommands ( ) . values ( ) ) { String cat = defKey ; if ( ssc . getCategory ( ) != null ) { cat = ssc . getCategory ( ) . getCategory ( ) ; } if ( ! cat2Cmd . containsKey ( cat ) ) { cat2Cmd . put ( cat , new TreeMap < > ( ) ) ; } cat2Cmd . get ( cat ) . put ( ssc . getCommand ( ) , ssc ) ; } } // no argument , means general help
at . addRow ( null , this . skbShell . getDisplayName ( ) + "-" + this . skbShell . getDescription ( ) ) ; at . addRule ( ) ; // do the commands per category , starting with " _ _ standard "
for ( String cat : cat2Cmd . keySet ( ) ) { String catDescr = cat ; if ( defKey . equals ( cat ) ) { catDescr = "standard commands" ; } at . addRow ( catDescr , new StrBuilder ( ) . appendWithSeparators ( cat2Cmd . get ( cat ) . keySet ( ) , ", " ) ) ; at . addRule ( ) ; } at . addRow ( null , "try: 'help <command>' for more details" ) ; |
public class ProjectEnvironment { /** * A set of environment variables to make available to builds for this build project .
* @ param environmentVariables
* A set of environment variables to make available to builds for this build project . */
public void setEnvironmentVariables ( java . util . Collection < EnvironmentVariable > environmentVariables ) { } } | if ( environmentVariables == null ) { this . environmentVariables = null ; return ; } this . environmentVariables = new java . util . ArrayList < EnvironmentVariable > ( environmentVariables ) ; |
public class ObjectValidator { /** * The object must have a datastream to match each dsTypeModel in the
* content model . Matching a dsTypeModel means equal IDs and an acceptable
* form . */
private void confirmMatchForDsTypeModel ( ValidationResult result , DsTypeModel typeModel , String contentModelPid , ObjectInfo object ) { } } | String id = typeModel . getId ( ) ; DatastreamInfo dsInfo = object . getDatastreamInfo ( id ) ; if ( dsInfo == null ) { // If there is no datastream by that name , nothing to check .
result . addNote ( ValidationResultNotation . noMatchingDatastreamId ( contentModelPid , id ) ) ; return ; } Collection < Form > forms = typeModel . getForms ( ) ; if ( forms . isEmpty ( ) ) { // If the type model has no forms , it ' s an automatic match .
return ; } // Otherwise , the datastream must meet the constraints of at least one form .
for ( Form form : forms ) { if ( meetsConstraint ( dsInfo . getMimeType ( ) , form . getMimeType ( ) ) && meetsConstraint ( dsInfo . getFormatUri ( ) , form . getFormatUri ( ) ) ) { return ; } } result . addNote ( ValidationResultNotation . datastreamDoesNotMatchForms ( contentModelPid , id ) ) ; |
public class AbstractElementVisitor6 { /** * { @ inheritDoc }
* @ implSpec Visits a { @ code ModuleElement } by calling { @ code
* visitUnknown } .
* @ param e { @ inheritDoc }
* @ param p { @ inheritDoc }
* @ return the result of { @ code visitUnknown }
* @ since 9
* @ spec JPMS */
@ Override public R visitModule ( ModuleElement e , P p ) { } } | // Use implementation from interface default method
return ElementVisitor . super . visitModule ( e , p ) ; |
public class DataEditorApplicationConfig { /** * views */
@ Bean public WidgetViewDescriptor itemView ( ) { } } | return new WidgetViewDescriptor ( "itemView" , new WidgetProvider < Widget > ( ) { @ Override public Widget getWidget ( ) { return itemDataEditor ( ) ; } } ) ; |
public class WatirUtils { /** * Sanitize the given filename / path , so that ( 1 ) it is not surrounded with quotation marks " " , ( 2)
* all occurences of " \ \ " are changed to a " / " , and ( 3 ) all occurences of " \ " are changed to a
* @ param input the input path
* @ return a StringBuffer containing the sanitized input path */
public static StringBuffer sanitizePath ( String input ) { } } | StringBuffer buf = new StringBuffer ( ) ; // Strip any surrounding quotation marks , that might have came with the file name from
// any external source like the Windows environment variable .
if ( input . matches ( "^\".+\"$" ) ) { input = input . substring ( 1 , input . length ( ) - 1 ) ; } // Make sure we use " / " as the path separator , seems to be the best solution . Also , strip double
// " \ \ " used as separators .
input = input . replaceAll ( "\\\\\\\\" , "/" ) ; input = input . replaceAll ( "\\\\" , "/" ) ; buf . append ( input ) ; return buf ; |
public class RunScheduledInstancesResult { /** * The IDs of the newly launched instances .
* @ return The IDs of the newly launched instances . */
public java . util . List < String > getInstanceIdSet ( ) { } } | if ( instanceIdSet == null ) { instanceIdSet = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return instanceIdSet ; |
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < }
* { @ link EnumIncludeRelationships } { @ code > } */
@ XmlElementDecl ( namespace = "http://docs.oasis-open.org/ns/cmis/messaging/200908/" , name = "includeRelationships" , scope = GetObjectParents . class ) public JAXBElement < EnumIncludeRelationships > createGetObjectParentsIncludeRelationships ( EnumIncludeRelationships value ) { } } | return new JAXBElement < EnumIncludeRelationships > ( _GetObjectOfLatestVersionIncludeRelationships_QNAME , EnumIncludeRelationships . class , GetObjectParents . class , value ) ; |
public class DynamicByteBufferHelper { /** * Insert double into .
* @ param array the array
* @ param index the index
* @ param value the value
* @ return the byte [ ] */
public static byte [ ] insertDoubleInto ( byte [ ] array , int index , double value ) { } } | byte [ ] holder = new byte [ 4 ] ; doubleTo ( holder , 0 , value ) ; return insert ( array , index , holder ) ; |
public class FedoraResourceImpl { /** * Check the SPARQLUpdate statements for the invalid interaction model changes .
* @ param request the UpdateRequest
* @ throws InteractionModelViolationException when attempting to change the interaction model */
private void checkInteractionModel ( final UpdateRequest request ) { } } | final List < Quad > deleteQuads = new ArrayList < > ( ) ; final List < Quad > updateQuads = new ArrayList < > ( ) ; for ( final Update operation : request . getOperations ( ) ) { if ( operation instanceof UpdateModify ) { final UpdateModify op = ( UpdateModify ) operation ; deleteQuads . addAll ( op . getDeleteQuads ( ) ) ; updateQuads . addAll ( op . getInsertQuads ( ) ) ; } else if ( operation instanceof UpdateData ) { final UpdateData op = ( UpdateData ) operation ; updateQuads . addAll ( op . getQuads ( ) ) ; } else if ( operation instanceof UpdateDeleteWhere ) { final UpdateDeleteWhere op = ( UpdateDeleteWhere ) operation ; deleteQuads . addAll ( op . getQuads ( ) ) ; } final Optional < String > resourceInteractionModel = getResourceInteraction ( ) ; if ( resourceInteractionModel . isPresent ( ) ) { updateQuads . forEach ( e -> { // check for interaction model change violation
checkInteractionModel ( e . asTriple ( ) , resourceInteractionModel ) ; } ) ; } deleteQuads . forEach ( e -> { final String interactionModel = getInteractionModel . apply ( e . asTriple ( ) ) ; if ( StringUtils . isNotBlank ( interactionModel ) ) { throw new InteractionModelViolationException ( "Deleting the interaction model " + interactionModel + " is not allowed!" ) ; } } ) ; } |
public class DescribeClientVpnEndpointsResult { /** * Information about the Client VPN endpoints .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setClientVpnEndpoints ( java . util . Collection ) } or { @ link # withClientVpnEndpoints ( java . util . Collection ) } if
* you want to override the existing values .
* @ param clientVpnEndpoints
* Information about the Client VPN endpoints .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DescribeClientVpnEndpointsResult withClientVpnEndpoints ( ClientVpnEndpoint ... clientVpnEndpoints ) { } } | if ( this . clientVpnEndpoints == null ) { setClientVpnEndpoints ( new com . amazonaws . internal . SdkInternalList < ClientVpnEndpoint > ( clientVpnEndpoints . length ) ) ; } for ( ClientVpnEndpoint ele : clientVpnEndpoints ) { this . clientVpnEndpoints . add ( ele ) ; } return this ; |
public class ClassFileWriter { /** * Generate code to load the given integer on stack .
* @ param k the constant */
public void addPush ( int k ) { } } | if ( ( byte ) k == k ) { if ( k == - 1 ) { add ( ByteCode . ICONST_M1 ) ; } else if ( 0 <= k && k <= 5 ) { add ( ( byte ) ( ByteCode . ICONST_0 + k ) ) ; } else { add ( ByteCode . BIPUSH , ( byte ) k ) ; } } else if ( ( short ) k == k ) { add ( ByteCode . SIPUSH , ( short ) k ) ; } else { addLoadConstant ( k ) ; } |
public class AWSSupportClient { /** * Returns communications ( and attachments ) for one or more support cases . You can use the < code > afterTime < / code >
* and < code > beforeTime < / code > parameters to filter by date . You can use the < code > caseId < / code > parameter to
* restrict the results to a particular case .
* Case data is available for 12 months after creation . If a case was created more than 12 months ago , a request for
* data might cause an error .
* You can use the < code > maxResults < / code > and < code > nextToken < / code > parameters to control the pagination of the
* result set . Set < code > maxResults < / code > to the number of cases you want displayed on each page , and use
* < code > nextToken < / code > to specify the resumption of pagination .
* @ param describeCommunicationsRequest
* @ return Result of the DescribeCommunications operation returned by the service .
* @ throws InternalServerErrorException
* An internal server error occurred .
* @ throws CaseIdNotFoundException
* The requested < code > caseId < / code > could not be located .
* @ sample AWSSupport . DescribeCommunications
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / support - 2013-04-15 / DescribeCommunications " target = " _ top " > AWS
* API Documentation < / a > */
@ Override public DescribeCommunicationsResult describeCommunications ( DescribeCommunicationsRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDescribeCommunications ( request ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.