signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ProcessContext { /** * Checks if the given attribute is known , i . e . is contained in the * attribute list . * @ param attribute Attribute to be checked . * @ throws IllegalArgumentException If the given attribute is not known . */ public void validateAttribute ( String attribute ) throws CompatibilityException { } }
try { super . validateObject ( attribute ) ; } catch ( CompatibilityException e ) { throw new CompatibilityException ( "Unknown attribute: " + attribute , e ) ; }
public class MemcachedManagerBuilder { /** * Configuration based on system properties set by the memcacheAddOn */ public static MemcachedManagerBuilder memcacheAddOn ( ) { } }
final String memcacheServers = System . getenv ( "MEMCACHE_SERVERS" ) ; return memcachedConfig ( ) . username ( System . getenv ( "MEMCACHE_USERNAME" ) ) . password ( System . getenv ( "MEMCACHE_PASSWORD" ) ) . url ( memcacheServers == null ? DEFAULT_URL : memcacheServers ) ;
public class CmsJspTagEdit { /** * Creates the String specifying where which type of resource has to be created . < p > * @ param cms the CMS context * @ param resType the resource type to create * @ param creationSitemap the creation sitemap parameter * @ return The String identifying which type of resource has to be created where . < p > * @ see # createResource ( CmsObject , String , Locale , String , String , String , String ) */ public static String getNewLink ( CmsObject cms , I_CmsResourceType resType , String creationSitemap ) { } }
String contextPath = getContextRootPath ( cms , creationSitemap ) ; StringBuffer newLink = new StringBuffer ( NEW_LINK_IDENTIFIER ) ; newLink . append ( '|' ) ; newLink . append ( contextPath ) ; newLink . append ( '|' ) ; newLink . append ( resType . getTypeName ( ) ) ; return newLink . toString ( ) ;
public class SshNode {

    /**
     * Deletes this node's file over SFTP.
     *
     * @return this node, for chaining
     * @throws FileNotFoundException if the remote path does not exist, or if
     *         it turns out to be a directory (so there is no *file* to delete)
     * @throws DeleteException if the SFTP channel cannot be allocated or the
     *         remove fails for any other reason
     */
    @Override
    public SshNode deleteFile() throws FileNotFoundException, DeleteException {
        ChannelSftp sftp;
        boolean directory;
        try {
            sftp = alloc();
        } catch (JSchException e) {
            // Could not even get a channel -- surface as a delete failure.
            throw new DeleteException(this, e);
        }
        try {
            sftp.rm(escape(slashPath));
        } catch (SftpException e) {
            if (e.id == ChannelSftp.SSH_FX_NO_SUCH_FILE) {
                throw new FileNotFoundException(this);
            }
            // rm failed for another reason; check whether the path is actually
            // a directory, which explains why a *file* delete could not work.
            try {
                directory = isDirectory();
            } catch (ExistsException e1) {
                directory = false; // fall-through - report original exception
            }
            if (directory) {
                throw new FileNotFoundException(this, e);
            }
            throw new DeleteException(this, e);
        } finally {
            // Always return the channel to the pool, even on failure.
            free(sftp);
        }
        return this;
    }
}
import java.util.Arrays;

public class CalculateMinDiff {

    /**
     * Determines the smallest difference between any two elements among the
     * first {@code size} elements of the given array.
     *
     * <pre>
     * calculateMinDiff(new int[] {1, 5, 3, 19, 18, 25}, 6) == 1
     * calculateMinDiff(new int[] {4, 3, 2, 6}, 4)         == 1
     * calculateMinDiff(new int[] {30, 5, 20, 9}, 4)       == 4
     * </pre>
     *
     * @param array array of integers; its first {@code size} elements are
     *              considered and are sorted in place as a side effect
     * @param size  number of leading elements to consider
     * @return the smallest difference between any two considered elements, or
     *         {@link Integer#MAX_VALUE} if fewer than two elements are considered
     */
    public static int calculateMinDiff(int[] array, int size) {
        // Fix: sort only the first `size` elements. Sorting the whole array
        // would mix elements beyond `size` into the considered range and
        // could produce a wrong (too small) difference.
        Arrays.sort(array, 0, size);
        int minDifference = Integer.MAX_VALUE;
        // After sorting, the minimum difference is between adjacent elements.
        for (int i = 0; i < size - 1; i++) {
            if (array[i + 1] - array[i] < minDifference) {
                minDifference = array[i + 1] - array[i];
            }
        }
        return minDifference;
    }
}
public class ProjectResolver { /** * Aggregate projects to their base projects * @ param reactorProjects The current reactor projects . * @ param rulesDirectory The configured rules directory . * @ param useExecutionRootAsProjectRoot < code > true < / code > if the execution root shall be used as project root . * @ return A map containing resolved base projects and their aggregated projects . * @ throws MojoExecutionException If aggregation fails . */ static Map < MavenProject , List < MavenProject > > getProjects ( List < MavenProject > reactorProjects , String rulesDirectory , boolean useExecutionRootAsProjectRoot ) throws MojoExecutionException { } }
Map < MavenProject , List < MavenProject > > rootModules = new HashMap < > ( ) ; for ( MavenProject reactorProject : reactorProjects ) { MavenProject rootModule = ProjectResolver . getRootModule ( reactorProject , reactorProjects , rulesDirectory , useExecutionRootAsProjectRoot ) ; List < MavenProject > modules = rootModules . get ( rootModule ) ; if ( modules == null ) { modules = new ArrayList < > ( ) ; rootModules . put ( rootModule , modules ) ; } modules . add ( reactorProject ) ; } return rootModules ;
public class ApiOvhMe { /** * List of registered payment mean you can use to pay this order * REST : GET / me / order / { orderId } / availableRegisteredPaymentMean * @ param orderId [ required ] */ public ArrayList < OvhRegisteredPaymentMean > order_orderId_availableRegisteredPaymentMean_GET ( Long orderId ) throws IOException { } }
String qPath = "/me/order/{orderId}/availableRegisteredPaymentMean" ; StringBuilder sb = path ( qPath , orderId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , t6 ) ;
public class TagBalancingHtmlStreamEventReceiver { /** * True if text is the value of an inter - element whitespace text node as * defined by HTML5. * This is the kind of text that is often inserted by * HTML authors to nicely indent their HTML documents and which * ( modulo unconventional use of { @ code white - space : pre } ) are not apparent * to the end - user . */ public static boolean isInterElementWhitespace ( String text ) { } }
int n = text . length ( ) ; for ( int i = 0 ; i < n ; ++ i ) { if ( ! Strings . isHtmlSpace ( text . charAt ( i ) ) ) { return false ; } } return true ;
public class Benchmark { /** * Expanded Scaffer ' s F6 function */ static public double EScafferF6 ( double [ ] x ) { } }
double sum = 0.0 ; for ( int i = 1 ; i < x . length ; i ++ ) { sum += ScafferF6 ( x [ i - 1 ] , x [ i ] ) ; } sum += ScafferF6 ( x [ x . length - 1 ] , x [ 0 ] ) ; return ( sum ) ;
public class ExpressRouteCrossConnectionsInner {

    /**
     * Gets the currently advertised ARP table associated with the express
     * route cross connection in a resource group.
     *
     * @param resourceGroupName the name of the resource group
     * @param crossConnectionName the name of the ExpressRouteCrossConnection
     * @param peeringName the name of the peering
     * @param devicePath the path of the device
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<ExpressRouteCircuitsArpTableListResultInner> listArpTableAsync(String resourceGroupName, String crossConnectionName, String peeringName, String devicePath, final ServiceCallback<ExpressRouteCircuitsArpTableListResultInner> serviceCallback) {
        // Delegates to the observable-based overload and adapts it into a
        // ServiceFuture that notifies the supplied callback.
        return ServiceFuture.fromResponse(listArpTableWithServiceResponseAsync(resourceGroupName, crossConnectionName, peeringName, devicePath), serviceCallback);
    }
}
public class InitOnceFieldHandler { /** * Set this cloned listener to the same state at this listener . * @ param field The field this new listener will be added to . * @ param The new listener to sync to this . * @ param Has the init method been called ? * @ return True if I called init . */ public boolean syncClonedListener ( BaseField field , FieldListener listener , boolean bInitCalled ) { } }
bInitCalled = super . syncClonedListener ( field , listener , bInitCalled ) ; ( ( InitOnceFieldHandler ) listener ) . setFirstTime ( m_bFirstTime ) ; return bInitCalled ;
public class ExtendedLoggerFactory { /** * Uses the method { @ link org . slf4j . LoggerFactory # getLogger ( Class ) } to create a logger named corresponding to the * class passed as parameter . In addition , this method will configure the logging system , installing the necessary * bridges to unify logging across the entire application . * @ param clazz - the returned logger will be named after clazz * @ return A logger named corresponding to the class passed as parameter . */ public static Logger getLogger ( final Class < ? > clazz ) { } }
es . upv . grycap . coreutils . logging . LogManager . getLogManager ( ) . init ( ) ; return org . slf4j . LoggerFactory . getLogger ( requireNonNull ( clazz , "A non-null class expected" ) ) ;
public class XBasePanel {

    /**
     * Prints the XML header info: title, meta keywords, meta description and
     * an optional meta redirect.
     *
     * @param out the http output stream
     * @param reg local resource bundle (not read by this method)
     */
    public void printXMLHeaderInfo(PrintWriter out, ResourceBundle reg) {
        String strTitle = this.getProperty("title"); // Menu page
        // Fall back to the screen field's title when no "title" property is set.
        if ((strTitle == null) || (strTitle.length() == 0))
            strTitle = ((BasePanel) this.getScreenField()).getTitle();
        out.println(Utility.startTag(XMLTags.TITLE) + strTitle + Utility.endTag(XMLTags.TITLE));
        // Keywords are only emitted when present.
        String strKeywords = this.getKeywords();
        if ((strKeywords != null) && (strKeywords.length() > 0))
            out.println(Utility.startTag(XMLTags.META_KEYWORDS) + strKeywords + Utility.endTag(XMLTags.META_KEYWORDS));
        // The meta description simply mirrors the title.
        String strDescription = strTitle;
        out.println(Utility.startTag(XMLTags.META_DESCRIPTION) + strDescription + Utility.endTag(XMLTags.META_DESCRIPTION));
        // Optional meta redirect, emitted only when configured.
        String redirect = this.getMetaRedirect();
        if ((redirect != null) && (redirect.length() > 0))
            out.println(Utility.startTag(XMLTags.META_REDIRECT) + redirect + Utility.endTag(XMLTags.META_REDIRECT));
    }
}
public class Math { /** * Reverses the order of the elements in the specified array . * @ param a an array to reverse . */ public static void reverse ( int [ ] a ) { } }
int i = 0 , j = a . length - 1 ; while ( i < j ) { SortUtils . swap ( a , i ++ , j -- ) ; // code for swap not shown , but easy enough }
public class MapWidget {

    /**
     * Register a <code>WorldPaintable</code> object to be painted on the map.
     * By doing so, the object will be painted immediately on the correct
     * position, and when the user navigates around, the map will automatically
     * make sure the <code>WorldPaintable</code> object is re-drawn at the
     * correct location. If you want to draw objects in World Space, this would
     * be the way to go.
     *
     * @param worldPaintable the new WorldPaintable object to be rendered on
     *        the map; nulls and objects already registered (by id) are ignored
     * @since 1.6.0
     */
    @Api
    public void registerWorldPaintable(WorldPaintable worldPaintable) {
        if (worldPaintable != null && !worldPaintables.containsKey(worldPaintable.getId())) {
            worldPaintables.put(worldPaintable.getId(), worldPaintable);
            // Transform to the current world view so the object appears at the
            // correct position right away, then trigger an immediate render.
            worldPaintable.transform(mapModel.getMapView().getWorldViewTransformer());
            render(worldPaintable, RenderGroup.WORLD, RenderStatus.ALL);
        }
    }
}
public class PurgeObsoleteDataTask { /** * / * ( non - Javadoc ) * @ see java . util . TimerTask # run ( ) */ @ Override @ Transactional public void run ( ) { } }
String storedProc = "purge_obsolete_batch_data" ; boolean result = entityManager . createStoredProcedureQuery ( storedProc ) . registerStoredProcedureParameter ( 0 , Integer . class , ParameterMode . IN ) . setParameter ( 0 , 90 ) . execute ( ) ; log . info ( "executed {} : resultset ? {}" , storedProc , result ) ;
public class UpdateThingGroupsForThingRequestMarshaller {

    /**
     * Marshalls the given request object into the protocol representation.
     *
     * @param updateThingGroupsForThingRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller that receives each field binding
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(UpdateThingGroupsForThingRequest updateThingGroupsForThingRequest, ProtocolMarshaller protocolMarshaller) {
        if (updateThingGroupsForThingRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind each request field to its protocol marshalling location.
            protocolMarshaller.marshall(updateThingGroupsForThingRequest.getThingName(), THINGNAME_BINDING);
            protocolMarshaller.marshall(updateThingGroupsForThingRequest.getThingGroupsToAdd(), THINGGROUPSTOADD_BINDING);
            protocolMarshaller.marshall(updateThingGroupsForThingRequest.getThingGroupsToRemove(), THINGGROUPSTOREMOVE_BINDING);
            protocolMarshaller.marshall(updateThingGroupsForThingRequest.getOverrideDynamicGroups(), OVERRIDEDYNAMICGROUPS_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the cause for diagnostics.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class StandardDdlParser {

    /**
     * Utility method to parse a statement that can be ignored. The returned
     * generic {@link AstNode} covers all text between the starting token and
     * either the terminator (if defined) or the next statement start token.
     * NOTE: this method does NOT mark and add the consumed fragment to the
     * parent node.
     *
     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
     * @param name the name for the new node; may not be null
     * @param parentNode the parent {@link AstNode} node; may not be null
     * @param mixinType the mixin type for the new node; may not be null
     * @return the parsed generic {@link AstNode}
     * @throws ParsingException if parsing fails
     */
    protected AstNode parseIgnorableStatement(DdlTokenStream tokens, String name, AstNode parentNode, String mixinType) {
        CheckArg.isNotNull(tokens, "tokens");
        CheckArg.isNotNull(name, "name");
        CheckArg.isNotNull(parentNode, "parentNode");
        CheckArg.isNotNull(mixinType, "mixinType");
        AstNode node = nodeFactory().node(name, parentNode, mixinType);
        // Consume everything up to the terminator (or next statement start);
        // the consumed fragment is intentionally not recorded on the node.
        parseUntilTerminator(tokens);
        return node;
    }
}
public class KafkaMsgConsumer { /** * Commit the specified offsets for the specified list of topics and * partitions . * @ param tpoList * @ since 1.3.2 */ public void commit ( KafkaTopicPartitionOffset ... tpoList ) { } }
Map < String , Map < TopicPartition , OffsetAndMetadata > > topicOffsets = new HashMap < > ( ) ; for ( KafkaTopicPartitionOffset tpo : tpoList ) { Map < TopicPartition , OffsetAndMetadata > offsets = topicOffsets . get ( tpo . topic ) ; if ( offsets == null ) { offsets = new HashMap < > ( ) ; topicOffsets . put ( tpo . topic , offsets ) ; } TopicPartition tp = new TopicPartition ( tpo . topic , tpo . partition ) ; OffsetAndMetadata oam = new OffsetAndMetadata ( tpo . offset ) ; offsets . put ( tp , oam ) ; } for ( Entry < String , Map < TopicPartition , OffsetAndMetadata > > entry : topicOffsets . entrySet ( ) ) { String topic = entry . getKey ( ) ; KafkaConsumer < String , byte [ ] > consumer = _getConsumer ( topic ) ; synchronized ( consumer ) { Set < String > subscription = consumer . subscription ( ) ; if ( subscription == null || ! subscription . contains ( topic ) ) { // this consumer has not subscribed to the topic LOGGER . warn ( "Not subscribed to topic [" + topic + "] yet!" ) ; } else { try { consumer . commitSync ( entry . getValue ( ) ) ; } catch ( WakeupException e ) { } catch ( Exception e ) { throw new KafkaException ( e ) ; } } } }
public class PortalHttpServletRequestWrapper { /** * / * ( non - Javadoc ) * @ see org . apereo . portal . url . AbstractHttpServletRequestWrapper # getLocales ( ) */ @ Override public Enumeration < Locale > getLocales ( ) { } }
if ( super . getSession ( false ) == null ) { return super . getLocales ( ) ; } final IUserInstance userInstance = this . userInstanceManager . getUserInstance ( this . getWrappedRequest ( ) ) ; final LocaleManager localeManager = userInstance . getLocaleManager ( ) ; final List < Locale > locales = localeManager . getLocales ( ) ; return Collections . enumeration ( locales ) ;
public class S { /** * Turn an object ' s String representation into Camel Case * @ param obj * @ return the string result */ @ Transformer public static String camelCase ( Object obj ) { } }
if ( null == obj ) return "" ; String string = obj . toString ( ) ; // string = noAccents ( string ) ; // string = string . replaceAll ( " [ ^ \ \ w ] " , " " ) ; StringBuilder result = new StringBuilder ( string . length ( ) ) ; String [ ] sa = string . split ( " " ) ; int l = sa . length ; for ( int i = 0 ; i < l ; ++ i ) { if ( i > 0 ) result . append ( " " ) ; for ( String s : sa [ i ] . split ( "_" ) ) { result . append ( capFirst ( s ) ) ; } } return result . toString ( ) ;
public class GetContentStructureOperation {

    /**
     * Adds all 'other content' to the parent XML element.
     *
     * @param content wrapper whose content is appended to the parent element;
     *        silently ignored while no parent element has been established
     */
    @Override
    public void processOtherContent(Wrapper<Content> content) {
        // No-op until a parent element exists to attach content to.
        if (parentElement != null) {
            parentElement.addContent(content.getContent());
        }
    }
}
public class CmsShellCommands {

    /**
     * Creates a group.<p>
     *
     * @param name the name of the new group
     * @param description the description of the new group
     * @return the created group
     * @throws Exception if something goes wrong
     * @see CmsObject#createGroup(String, String, int, String)
     */
    public CmsGroup createGroup(String name, String description) throws Exception {
        // Delegates to the CMS object; new groups are enabled and have no
        // parent group (null).
        return m_cms.createGroup(name, description, I_CmsPrincipal.FLAG_ENABLED, null);
    }
}
public class MDLV2000Writer {

    /**
     * Formats an integer right-aligned in a fixed-width field for the MDL
     * connection table and returns it as a String.
     *
     * @param x the int to be formatted
     * @param n length of the resulting String
     * @return the String to be written into the connection table; values that
     *         do not fit in {@code n} characters are written as "0"
     */
    protected static String formatMDLInt(int x, int n) {
        String digits = Integer.toString(x);
        if (digits.length() > n) {
            // Value is too wide for the field; write 0 instead.
            digits = "0";
        }
        StringBuilder field = new StringBuilder(n);
        for (int pad = n - digits.length(); pad > 0; pad--) {
            field.append(' ');
        }
        field.append(digits);
        return field.toString();
    }
}
public class ConfigRenderOptions { /** * Returns options with comments toggled . This controls human - written * comments but not the autogenerated " origin of this setting " comments , * which are controlled by { @ link ConfigRenderOptions # setOriginComments } . * @ param value * true to include comments in the render * @ return options with requested setting for comments */ public ConfigRenderOptions setComments ( boolean value ) { } }
if ( value == comments ) return this ; else return new ConfigRenderOptions ( originComments , value , formatted , json ) ;
public class PutGatewayResponseResult {

    /**
     * Response templates of the <a>GatewayResponse</a> as a string-to-string
     * map of key-value pairs.
     *
     * @param responseTemplates response templates of the <a>GatewayResponse</a>
     *        as a string-to-string map of key-value pairs
     * @return this object, so that method calls can be chained together
     */
    public PutGatewayResponseResult withResponseTemplates(java.util.Map<String, String> responseTemplates) {
        setResponseTemplates(responseTemplates);
        return this;
    }
}
public class RandomGUID { /** * Returns a uuid / guid for a given byte array . * @ param ba * array of bytes containing the id * @ return id */ public static String fromByteArray ( byte [ ] ba ) { } }
if ( ( ba != null ) && ( ba . length == 16 ) ) { StringBuilder result = new StringBuilder ( 36 ) ; for ( int i = 0 ; i < 16 ; ++ i ) { if ( ( i == 4 ) || ( i == 6 ) || ( i == 8 ) || ( i == 10 ) ) { result . append ( '-' ) ; } result . append ( hexChars . charAt ( ( ( ba [ i ] & 0xF0 ) >>> 4 ) ) ) ; result . append ( hexChars . charAt ( ( ba [ i ] & 0xF ) ) ) ; } return result . toString ( ) ; } return null ;
public class CommerceNotificationTemplateUserSegmentRelPersistenceImpl {

    /**
     * Caches the commerce notification template user segment rels in the
     * entity cache if it is enabled.
     *
     * @param commerceNotificationTemplateUserSegmentRels the commerce notification template user segment rels
     */
    @Override
    public void cacheResult(List<CommerceNotificationTemplateUserSegmentRel> commerceNotificationTemplateUserSegmentRels) {
        for (CommerceNotificationTemplateUserSegmentRel commerceNotificationTemplateUserSegmentRel : commerceNotificationTemplateUserSegmentRels) {
            // Cache only entries not yet present; an entry already in the
            // cache is reset to its original values instead of re-cached.
            if (entityCache.getResult(CommerceNotificationTemplateUserSegmentRelModelImpl.ENTITY_CACHE_ENABLED, CommerceNotificationTemplateUserSegmentRelImpl.class, commerceNotificationTemplateUserSegmentRel.getPrimaryKey()) == null) {
                cacheResult(commerceNotificationTemplateUserSegmentRel);
            } else {
                commerceNotificationTemplateUserSegmentRel.resetOriginalValues();
            }
        }
    }
}
public class BijectiveNsMap {

    /**
     * Adds a new prefix-to-URI mapping for the current scope.
     * Note that it should NOT be used for the default namespace declaration.
     *
     * @param prefix prefix to bind
     * @param uri URI to bind to the prefix
     * @return if the prefix was already bound in this scope, the URI it was
     *         bound to; null if it's a new binding for the current scope
     */
    public String addMapping(String prefix, String uri) {
        String[] strs = mNsStrings;
        int phash = prefix.hashCode();
        // Scan the current scope ([prefix, uri] pairs) for an existing binding.
        for (int ix = mScopeStart, end = mScopeEnd; ix < end; ix += 2) {
            String thisP = strs[ix];
            // Identity comparison first: prefixes are typically interned, so
            // this is a cheap fast path before the hash/equals check.
            if (thisP == prefix || (thisP.hashCode() == phash && thisP.equals(prefix))) {
                // Overriding an existing mapping
                String old = strs[ix + 1];
                strs[ix + 1] = uri;
                return old;
            }
        }
        // no previous binding, let's just add it at the end
        if (mScopeEnd >= strs.length) {
            // let's just double the array sizes...
            strs = DataUtil.growArrayBy(strs, strs.length);
            mNsStrings = strs;
        }
        strs[mScopeEnd++] = prefix;
        strs[mScopeEnd++] = uri;
        return null;
    }
}
public class CmsSecurityManager {

    /**
     * Replaces the rewrite aliases for a given site root.<p>
     *
     * @param requestContext the current request context
     * @param siteRoot the site root for which the rewrite aliases should be replaced
     * @param newAliases the new list of aliases for the given site root
     * @throws CmsException if something goes wrong
     */
    public void saveRewriteAliases(CmsRequestContext requestContext, String siteRoot, List<CmsRewriteAlias> newAliases) throws CmsException {
        CmsDbContext dbc = m_dbContextFactory.getDbContext(requestContext);
        try {
            // NOTE(review): project/permission checks are disabled here --
            // confirm whether this is intentional:
            // checkOfflineProject(dbc);
            // checkPermissions(dbc, resource, CmsPermissionSet.ACCESS_WRITE, true, CmsResourceFilter.ALL);
            m_driverManager.saveRewriteAliases(dbc, siteRoot, newAliases);
        } catch (Exception e) {
            // The context converts the failure into a reported CmsException.
            dbc.report(null, Messages.get().container(Messages.ERR_DB_OPERATION_0), e);
        } finally {
            dbc.clear();
        }
    }
}
public class PenalizedEvaluation {

    /**
     * Adds a penalty expressed by a penalizing validation object. A key is
     * required that can be used to retrieve the validation object later.
     *
     * @param key key used to retrieve the validation object later
     * @param penalizingValidation penalizing validation that indicates the assigned penalty
     */
    public void addPenalizingValidation(Object key, PenalizingValidation penalizingValidation) {
        // Lazily create the penalties map on first use.
        initMapOnce();
        penalties.put(key, penalizingValidation);
        // update penalized value
        if (!penalizingValidation.passed()) {
            assignedPenalties = true;
            double p = penalizingValidation.getPenalty();
            // Worsen the value in the direction of the objective: add the
            // penalty when minimizing, subtract it when maximizing
            // (assumes p >= 0 -- TODO confirm).
            penalizedValue += minimizing ? p : -p;
        }
    }
}
public class JobTracker {

    /**
     * The periodic heartbeat mechanism between the {@link TaskTracker} and
     * the {@link JobTracker}. Processes the status information sent by the
     * {@link TaskTracker} and responds with instructions to start/stop tasks
     * or jobs, and also 'reset' instructions during contingencies.
     *
     * @param status current status report from the tracker
     * @param restarted true if the tracker reports it has restarted
     * @param initialContact true on the tracker's first contact
     * @param acceptNewTasks true if the tracker can accept new tasks
     * @param responseId id of the last response the tracker received
     * @return the response carrying actions for the tracker
     * @throws IOException if the tracker is not allowed to contact this job tracker
     */
    public HeartbeatResponse heartbeat(TaskTrackerStatus status, boolean restarted, boolean initialContact, boolean acceptNewTasks, short responseId) throws IOException {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Got heartbeat from: " + status.getTrackerName()
                    + " (restarted: " + restarted
                    + " initialContact: " + initialContact
                    + " acceptNewTasks: " + acceptNewTasks + ")"
                    + " with responseId: " + responseId);
        }
        short newResponseId;
        boolean shouldSchedule = false;
        TaskTrackerStatus taskTrackerStatus;
        String trackerName;
        synchronized (this) {
            // Make sure heartbeat is from a tasktracker allowed by the jobtracker.
            if (!acceptTaskTracker(status)) {
                throw new DisallowedTaskTrackerException(status);
            }
            // First check if the last heartbeat response got through
            trackerName = status.getTrackerName();
            long now = getClock().getTime();
            if (restarted) {
                faultyTrackers.markTrackerHealthy(status.getHost());
            } else {
                // This updates faulty tracker information.
                faultyTrackers.shouldAssignTasksToTracker(status.getHost(), now);
            }
            HeartbeatResponse prevHeartbeatResponse = trackerToHeartbeatResponseMap.get(trackerName);
            if (initialContact != true) {
                // If this isn't the 'initial contact' from the tasktracker,
                // there is something seriously wrong if the JobTracker has
                // no record of the 'previous heartbeat'; if so, ask the
                // tasktracker to re-initialize itself.
                if (prevHeartbeatResponse == null) {
                    // Jobtracker might have restarted but no recovery is needed
                    // otherwise this code should not be reached
                    LOG.warn("Serious problem, cannot find record of 'previous' "
                            + "heartbeat for '" + trackerName
                            + "'; reinitializing the tasktracker");
                    return new HeartbeatResponse(responseId,
                            new TaskTrackerAction[] {new ReinitTrackerAction()});
                } else {
                    // It is completely safe to not process a 'duplicate' heartbeat
                    // from a TaskTracker since it resends the heartbeat when rpcs
                    // are lost (see TaskTracker.transmitHeartbeat()); acknowledge
                    // it by re-sending the previous response to let the
                    // TaskTracker go forward.
                    if (prevHeartbeatResponse.getResponseId() != responseId) {
                        LOG.info("Ignoring 'duplicate' heartbeat from '"
                                + trackerName + "'; resending the previous 'lost' response");
                        return prevHeartbeatResponse;
                    }
                }
            }
            // Process this heartbeat
            newResponseId = (short) (responseId + 1);
            status.setLastSeen(now);
            if (!processHeartbeat(status, initialContact)) {
                if (prevHeartbeatResponse != null) {
                    trackerToHeartbeatResponseMap.remove(trackerName);
                }
                return new HeartbeatResponse(newResponseId,
                        new TaskTrackerAction[] {new ReinitTrackerAction()});
            }
            // Only hand out new work to healthy, non-blacklisted trackers.
            shouldSchedule = acceptNewTasks && !faultyTrackers.isBlacklisted(status.getHost());
            taskTrackerStatus = shouldSchedule ? getTaskTrackerStatus(trackerName) : null;
        } // synchronized JobTracker

        // Initialize the response to be sent for the heartbeat
        HeartbeatResponse response = new HeartbeatResponse(newResponseId, null);
        List<TaskTrackerAction> actions = new ArrayList<TaskTrackerAction>();
        List<Task> setupCleanupTasks = null;
        // Check for setup/cleanup tasks to be executed on the tasktracker
        if (shouldSchedule) {
            if (taskTrackerStatus == null) {
                LOG.warn("Unknown task tracker polling; ignoring: " + trackerName);
            } else {
                setupCleanupTasks = getSetupAndCleanupTasks(taskTrackerStatus);
            }
        }
        synchronized (this) {
            // Check for tasks to be killed; we compute this first so that
            // additional tasks can be scheduled to compensate for the kills.
            List<TaskTrackerAction> killTasksList = getTasksToKill(trackerName);
            if (killTasksList != null) {
                actions.addAll(killTasksList);
            }
            List<Task> tasks = null;
            // Check for map/reduce tasks to be executed on the tasktracker;
            // ignore any contribution by setup/cleanup tasks - it's ok to try
            // and overschedule since setup/cleanup tasks are super fast
            if (taskTrackerStatus != null) {
                // Tell the Scheduler how many MAP/REDUCE slots will be released
                // after this heartbeat, so it can pre-schedule them.
                int mapsReleased = countSlotsReleased(killTasksList, setupCleanupTasks, taskTrackerStatus, TaskType.MAP);
                int reducesReleased = countSlotsReleased(killTasksList, setupCleanupTasks, taskTrackerStatus, TaskType.REDUCE);
                status.setMapsReleased(mapsReleased);
                status.setReducesReleased(reducesReleased);
                List<Task> assignedTasks = taskScheduler.assignTasks(taskTrackers.get(trackerName));
                if ((setupCleanupTasks != null) && (assignedTasks != null)) {
                    // tasks is immutable, so merge the tasks and assignedTasks
                    // into a new list; make sure the setup/cleanup tasks go
                    // first since we can be overscheduling tasks here and we
                    // need to make sure that the setup/cleanup is run first
                    tasks = new ArrayList<Task>(assignedTasks.size() + setupCleanupTasks.size());
                    tasks.addAll(setupCleanupTasks);
                    tasks.addAll(assignedTasks);
                } else {
                    tasks = (setupCleanupTasks != null) ? setupCleanupTasks : assignedTasks;
                }
            }
            if (tasks != null) {
                for (Task task : tasks) {
                    TaskAttemptID taskid = task.getTaskID();
                    JobInProgress job = getJob(taskid.getJobID());
                    if (job != null) {
                        createTaskEntry(taskid, taskTrackerStatus.getTrackerName(), job.getTaskInProgress(taskid.getTaskID()));
                    } else {
                        // Because we do not hold the jobtracker lock throughout
                        // this routine, there is a small chance that the job for
                        // the task we are trying to schedule no longer exists;
                        // ignore such tasks.
                        LOG.warn("Unable to find job corresponding to task: " + taskid.toString());
                    }
                    expireLaunchingTasks.addNewTask(task.getTaskID());
                    LOG.debug(trackerName + " -> LaunchTask: " + task.getTaskID());
                    actions.add(new LaunchTaskAction(task));
                }
            }
            // Check for jobs to be killed/cleaned up
            List<TaskTrackerAction> killJobsList = getJobsForCleanup(trackerName);
            if (killJobsList != null) {
                actions.addAll(killJobsList);
            }
            // Check for tasks whose outputs can be saved
            List<TaskTrackerAction> commitTasksList = getTasksToSave(status);
            if (commitTasksList != null) {
                actions.addAll(commitTasksList);
            }
            // Calculate next heartbeat interval and put it in the response
            int nextInterval = getNextHeartbeatInterval();
            response.setHeartbeatInterval(nextInterval);
            response.setActions(actions.toArray(new TaskTrackerAction[actions.size()]));
            // Update the trackerToHeartbeatResponseMap
            trackerToHeartbeatResponseMap.put(trackerName, response);
            // Done processing the heartbeat, now remove 'marked' tasks
            removeMarkedTasks(trackerName);
            return response;
        } // synchronized JobTracker
    }
}
public class EpicsApi { /** * Updates an epic - issue association . * < pre > < code > GitLab Endpoint : PUT / groups / : id / epics / : epic _ iid / issues / : issue _ id < / code > < / pre > * @ param groupIdOrPath the group ID , path of the group , or a Group instance holding the group ID or path * @ param epicIid the Epic IID that the issue is assigned to * @ param issueIid the issue IID to update * @ param moveBeforeId the ID of the issue - epic association that should be placed before the link in the question ( optional ) * @ param moveAfterId the ID of the issue - epic association that should be placed after the link in the question ( optional ) * @ return an EpicIssue instance containing info on the newly assigned epic issue * @ throws GitLabApiException if any exception occurs */ public EpicIssue updateIssue ( Object groupIdOrPath , Integer epicIid , Integer issueIid , Integer moveBeforeId , Integer moveAfterId ) throws GitLabApiException { } }
GitLabApiForm form = new GitLabApiForm ( ) . withParam ( "move_before_id" , moveBeforeId ) . withParam ( "move_after_id" , moveAfterId ) ; Response response = post ( Response . Status . OK , form , "groups" , getGroupIdOrPath ( groupIdOrPath ) , "epics" , epicIid , "issues" , issueIid ) ; return ( response . readEntity ( EpicIssue . class ) ) ;
public class CatalogUtil {

    /**
     * Load an in-memory catalog jar file from jar bytes.
     *
     * @param catalogBytes raw bytes of the catalog jar; must not be null
     * @return the in-memory jar containing the loaded catalog
     * @throws IOException if the catalog cannot be loaded
     */
    public static InMemoryJarfile loadInMemoryJarFile(byte[] catalogBytes) throws IOException {
        assert (catalogBytes != null);
        InMemoryJarfile jarfile = new InMemoryJarfile(catalogBytes);
        // A valid catalog jar must contain the catalog file entry.
        if (!jarfile.containsKey(CATALOG_FILENAME)) {
            throw new IOException("Database catalog not found - please build your application using the current version of VoltDB.");
        }
        return jarfile;
    }
}
public class ReferenceElement { /** * Return a list of all reference extensions contained in a stanza . * If there are no reference elements , return an empty list . * @ param stanza stanza * @ return list of all references contained in the stanza */ public static List < ReferenceElement > getReferencesFromStanza ( Stanza stanza ) { } }
// Collect every extension in the reference element/namespace and down-cast it.
List<ExtensionElement> extensions =
        stanza.getExtensions(ReferenceElement.ELEMENT, ReferenceManager.NAMESPACE);
List<ReferenceElement> references = new ArrayList<>(extensions.size());
for (ExtensionElement extension : extensions) {
    references.add((ReferenceElement) extension);
}
return references;
public class RegularFile { /** * Transfers up to { @ code count } bytes from the given channel to this file starting at position * { @ code pos } . Returns the number of bytes transferred . If { @ code pos } is greater than the * current size of this file , the file is truncated up to size { @ code pos } before writing . * @ throws IOException if the file needs more blocks but the disk is full or if reading from src * throws an exception */ public long transferFrom ( ReadableByteChannel src , long pos , long count ) throws IOException { } }
// Grow/zero-fill the file so that writing may begin at pos.
prepareForWrite ( pos , 0 ) ;
// don ' t assume the full count bytes will be written
if ( count == 0 ) { return 0 ; }
long remaining = count ;
int blockIndex = blockIndex ( pos ) ;
byte [ ] block = blockForWrite ( blockIndex ) ;
int off = offsetInBlock ( pos ) ;
// The first buffer may start mid-block; its length is capped by the block end and by count.
ByteBuffer buf = ByteBuffer . wrap ( block , off , length ( off , remaining ) ) ;
long currentPos = pos ;
int read = 0 ;
// Fill the first (possibly partial) block until it is full or the source is drained.
while ( buf . hasRemaining ( ) ) { read = src . read ( buf ) ; if ( read == - 1 ) { break ; } currentPos += read ; remaining -= read ; }
// update size before trying to get next block in case the disk is out of space
if ( currentPos > size ) { size = currentPos ; }
// Continue block-by-block only if the source has not yet signalled end-of-stream.
if ( read != - 1 ) { outer : while ( remaining > 0 ) { block = blockForWrite ( ++ blockIndex ) ; buf = ByteBuffer . wrap ( block , 0 , length ( remaining ) ) ; while ( buf . hasRemaining ( ) ) { read = src . read ( buf ) ; if ( read == - 1 ) { break outer ; } currentPos += read ; remaining -= read ; } if ( currentPos > size ) { size = currentPos ; } } }
// Final size update (covers the early-EOF path that broke out of the loops).
if ( currentPos > size ) { size = currentPos ; }
// Number of bytes actually transferred from the channel.
return currentPos - pos ;
public class JPAUtils { /** * Find Joined Root of type clazz * @ param < T > * @ param query the criteria query * @ param rootClass the root class * @ param joinClass the join class * @ return the Join */ public static < T , K > Join < T , K > findJoinedType ( CriteriaQuery < T > query , Class < T > rootClass , Class < K > joinClass ) { } }
Root < T > root = findRoot ( query , rootClass ) ; return findJoinedType ( root , rootClass , joinClass ) ;
public class Async { /** * Resumes a paused queue * @ param queueName queue name */ public void resume ( String queueName ) { } }
try {
    // Ask the queue's management control to resume message delivery.
    getQueueControl(queueName).resume();
} catch (Exception cause) {
    // Surface any management failure as the module's unchecked exception type.
    throw new AsyncException(cause);
}
public class MESubscription { /** * Returns true if this proxy sub was from a foreign bus in a secured env . * @ return The userid . */ final boolean isForeignSecuredProxy ( ) { } }
// Trace-only entry/exit pair; the flag itself is just returned.
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
    SibTr.entry(tc, "isForeignSecuredProxy");
    // Boolean.valueOf avoids allocating a wrapper; the Boolean(boolean)
    // constructor is deprecated.
    SibTr.exit(tc, "isForeignSecuredProxy", Boolean.valueOf(_foreignSecuredProxy));
}
return _foreignSecuredProxy;
public class AppEventsLogger { /** * Log an app event with the specified name , supplied value , and set of parameters . * @ param eventName eventName used to denote the event . Choose amongst the EVENT _ NAME _ * constants in * { @ link AppEventsConstants } when possible . Or create your own if none of the EVENT _ NAME _ * * constants are applicable . * Event names should be 40 characters or less , alphanumeric , and can include spaces , underscores * or hyphens , but mustn ' t have a space or hyphen as the first character . Any given app should * have no more than ~ 300 distinct event names . * @ param valueToSum a value to associate with the event which will be summed up in Insights for across all * instances of the event , so that average values can be determined , etc . * @ param parameters A Bundle of parameters to log with the event . Insights will allow looking at the logs of these * events via different parameter values . You can log on the order of 10 parameters with each * distinct eventName . It ' s advisable to keep the number of unique values provided for each * parameter in the , at most , thousands . As an example , don ' t attempt to provide a unique * parameter value for each unique user in your app . You won ' t get meaningful aggregate reporting * on so many parameter values . The values in the bundles should be Strings or numeric values . */ public void logEvent ( String eventName , double valueToSum , Bundle parameters ) { } }
// Delegates to the four-argument overload; the trailing boolean is passed as
// false here ( presumably an " implicitly logged " flag - confirm against the
// four-argument overload ) .
logEvent ( eventName , valueToSum , parameters , false ) ;
public class EncodingBase64InputStream { /** * Fill the buffer with more data from the InputStream , if there is any . * @ throws IOException * from the inner InputStream */ private void readMoreBytesFromStream ( ) throws IOException { } }
if ( ! innerStreamHasMoreData ) { return ; } int bufferSpaceAvailable = buffer . length - bytesInBuffer ; if ( bufferSpaceAvailable <= 0 ) { return ; } int bytesRead = stream . read ( buffer , bytesInBuffer , bufferSpaceAvailable ) ; if ( bytesRead == - 1 ) { innerStreamHasMoreData = false ; } else { bytesInBuffer += bytesRead ; }
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertTristateToString ( EDataType eDataType , Object instanceValue ) { } }
// Generated EMF converter : null stays null , otherwise the enum literal ' s
// own toString ( ) is used as its serialized form .
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class CPDefinitionLinkPersistenceImpl { /** * Creates a new cp definition link with the primary key . Does not add the cp definition link to the database . * @ param CPDefinitionLinkId the primary key for the new cp definition link * @ return the new cp definition link */ @ Override public CPDefinitionLink create ( long CPDefinitionLinkId ) { } }
// Build a transient (not yet persisted) entity carrying the given primary key.
CPDefinitionLink link = new CPDefinitionLinkImpl();
link.setNew(true);
link.setPrimaryKey(CPDefinitionLinkId);
// Fresh UUID and the provider-resolved company id complete the defaults.
link.setUuid(PortalUUIDUtil.generate());
link.setCompanyId(companyProvider.getCompanyId());
return link;
public class ExtractionUtil { /** * Extracts the file contained in a Zip archive . The extracted file is * placed in the exact same path as the file specified . * @ param file the archive file * @ throws FileNotFoundException thrown if the file does not exist * @ throws IOException thrown if there is an error extracting the file . */ public static void extractZip ( File file ) throws FileNotFoundException , IOException { } }
// Note : only the FIRST entry of the archive is extracted , and it overwrites
// the original file path .
final String originalPath = file . getPath ( ) ;
final File zip = new File ( originalPath + ".zip" ) ;
// Clear any stale " .zip " leftover from a previous run .
if ( zip . isFile ( ) && ! zip . delete ( ) ) { LOGGER . debug ( "Failed to delete initial temporary file when extracting 'zip' {}" , zip . toString ( ) ) ; zip . deleteOnExit ( ) ; }
// Move the archive aside so the extracted content can take its place .
if ( ! file . renameTo ( zip ) ) { throw new IOException ( "Unable to rename '" + file . getPath ( ) + "'" ) ; }
final File newFile = new File ( originalPath ) ;
try ( FileInputStream fis = new FileInputStream ( zip ) ; ZipInputStream cin = new ZipInputStream ( fis ) ; FileOutputStream out = new FileOutputStream ( newFile ) ) {
// Position the stream on the first entry - assumes single-entry archives ;
// TODO confirm callers never pass multi-entry zips .
cin . getNextEntry ( ) ;
IOUtils . copy ( cin , out ) ;
} finally {
// Always try to remove the temporary " .zip " copy ; fall back to delete-on-exit .
if ( zip . isFile ( ) && ! org . apache . commons . io . FileUtils . deleteQuietly ( zip ) ) { LOGGER . debug ( "Failed to delete temporary file when extracting 'zip' {}" , zip . toString ( ) ) ; zip . deleteOnExit ( ) ; } }
public class NodeListProcessor { /** * Add a default processing that will be applied when no specific processor is found . * @ param processor * the default processor . */ public void addDefaultProcessor ( NodeProcessor processor ) { } }
if (processor == null) {
    throw new IllegalArgumentException("Processor should not be null.");
}
// Register under the reserved default-node key so this processor is used
// whenever no node-specific processor matches.
getActionPool().put(NODE_NAME__DEFAULT, processor);
public class ExecutorServiceHelpers { /** * Executes the given task on the given Executor . * @ param task The RunnableWithException to execute . * @ param exceptionHandler A Consumer that will be invoked in case the task threw an Exception . This is not invoked if * the executor could not execute the given task . * @ param runFinally A Runnable that is guaranteed to be invoked at the end of this execution . If the executor * did accept the task , it will be invoked after the task is complete ( or ended in failure ) . * If the executor did not accept the task , it will be executed when this method returns . * @ param executor An Executor to execute the task on . */ public static void execute ( RunnableWithException task , Consumer < Throwable > exceptionHandler , Runnable runFinally , Executor executor ) { } }
Preconditions . checkNotNull ( task , "task" ) ;
Preconditions . checkNotNull ( exceptionHandler , "exceptionHandler" ) ;
Preconditions . checkNotNull ( runFinally , "runFinally" ) ;
// Tracks whether executor.execute() accepted the task without throwing,
// so runFinally is invoked exactly once on either path.
boolean scheduledSuccess = false ;
try { executor . execute ( ( ) -> { try { task . run ( ) ; } catch ( Throwable ex ) { if ( ! Exceptions . mustRethrow ( ex ) ) {
// Invoke the exception handler , but there ' s no point in rethrowing the exception , as it will simply
// be ignored by the executor .
exceptionHandler . accept ( ex ) ; } } finally {
// Accepted-task path: runFinally fires after the task completes or fails.
runFinally . run ( ) ; } } ) ; scheduledSuccess = true ; } finally {
// Invoke the finally callback in case we were not able to successfully schedule the task .
if ( ! scheduledSuccess ) { runFinally . run ( ) ; } }
public class DZcs_lsolve { /** * Solves a lower triangular system Lx = b where x and b are dense . x = b on * input , solution on output . * @ param L * column - compressed , lower triangular matrix * @ param x * size n , right hand side on input , solution on output * @ return true if successful , false on error */ public static boolean cs_lsolve ( DZcs L , DZcsa x ) { } }
// Forward substitution over the column-compressed lower-triangular complex matrix L.
int p , j , n , Lp [ ] , Li [ ] ;
DZcsa Lx = new DZcsa ( ) ;
if ( ! CS_CSC ( L ) || x == null ) return ( false ) ; /* check inputs */
n = L . n ; Lp = L . p ; Li = L . i ; Lx . x = L . x ;
for ( j = 0 ; j < n ; j ++ ) {
// Divide by the diagonal entry - the first stored entry of column j in a
// lower-triangular CSC matrix.
x . set ( j , cs_cdiv ( x . get ( j ) , Lx . get ( Lp [ j ] ) ) ) ;
// Subtract x[j] times the sub-diagonal entries of column j from the
// corresponding rows of x.
for ( p = Lp [ j ] + 1 ; p < Lp [ j + 1 ] ; p ++ ) { x . set ( Li [ p ] , cs_cminus ( x . get ( Li [ p ] ) , cs_cmult ( Lx . get ( p ) , x . get ( j ) ) ) ) ; }
}
return ( true ) ;
public class AbstractTranslator { /** * Translates user into user id * @ param user user * @ return user id */ protected UUID translateUserId ( User user ) { } }
if ( user == null ) { return null ; } return userController . getUserKeycloakId ( user ) ;
public class GSECOLImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public void eSet ( int featureID , Object newValue ) { } }
// Generated EMF reflective setter : the COLOR feature is routed to its typed
// setter ; any other feature id is deferred to the superclass .
switch ( featureID ) { case AfplibPackage . GSECOL__COLOR : setCOLOR ( ( Integer ) newValue ) ; return ; }
super . eSet ( featureID , newValue ) ;
public class MinimizeExitPoints { /** * Attempt to remove explicit exits from switch cases that also occur implicitly * after the switch . */ void tryMinimizeSwitchCaseExits ( Node n , Token exitType , @ Nullable String labelName ) { } }
checkState ( NodeUtil . isSwitchCase ( n ) ) ;
// The last case of a switch is handled elsewhere; this method requires a
// following sibling.
checkState ( n != n . getParent ( ) . getLastChild ( ) ) ;
Node block = n . getLastChild ( ) ;
Node maybeBreak = block . getLastChild ( ) ;
if ( maybeBreak == null || ! maybeBreak . isBreak ( ) || maybeBreak . hasChildren ( ) ) {
// Can not minimize exits from a case without an explicit break from the switch .
return ;
}
// Now try to minimize the exits of the last child before the break , if it is removed
// look at what has become the child before the break .
Node childBeforeBreak = maybeBreak . getPrevious ( ) ;
while ( childBeforeBreak != null ) { Node c = childBeforeBreak ; tryMinimizeExits ( c , exitType , labelName ) ;
// If the node is still the last child , we are done .
childBeforeBreak = maybeBreak . getPrevious ( ) ;
if ( c == childBeforeBreak ) { break ; } }
public class ReceiveMessageBuilder { /** * Expect this message header data as model object which is mapped to a character sequence * using the default object to json mapper before validation is performed . * @ param model * @ param objectMapper * @ return */ public T headerFragment ( Object model , ObjectMapper objectMapper ) { } }
try {
    // Serialize the model with the caller-supplied mapper, then add the JSON
    // string as a message header.
    String json = objectMapper.writer().writeValueAsString(model);
    return header(json);
} catch (JsonProcessingException e) {
    throw new CitrusRuntimeException("Failed to map object graph for message header data", e);
}
public class LibertyFeaturesToMavenRepo { /** * Find compile dependency from a zip entry in the ESA * @ param zipEntryPath The entry path in the zip * @ param groupId The group ID of the dependency to look for * @ return Maven coordinates corresponding to the entry */ private static MavenCoordinates findCompileDependency ( String zipEntryPath , String groupId ) { } }
// Locate the groupId prefix and the ".jar" suffix inside the entry path.
int apiNameIndex = zipEntryPath.indexOf(groupId);
int extensionIndex = zipEntryPath.lastIndexOf(".jar");
// Also require the suffix to come after the prefix, otherwise substring()
// below would throw.
if (apiNameIndex < 0 || extensionIndex < 0 || extensionIndex <= apiNameIndex) {
    return null;
}
String fileNameWithoutExtension = zipEntryPath.substring(apiNameIndex, extensionIndex);
// The file name is expected to be "<artifactId>_<version>"; a missing '_'
// previously caused a StringIndexOutOfBoundsException - treat it as no match.
int separatorIndex = fileNameWithoutExtension.lastIndexOf('_');
if (separatorIndex < 0) {
    return null;
}
String artifactId = fileNameWithoutExtension.substring(0, separatorIndex);
String versionId = fileNameWithoutExtension.substring(separatorIndex + 1);
MavenCoordinates coordinates = new MavenCoordinates(groupId, artifactId, versionId);
System.out.println("Found compile dependency: " + coordinates);
return coordinates;
public class CmsDriverManager { /** * Returns the date when the resource was last visited by the user . < p > * @ param dbc the database context * @ param poolName the name of the database pool to use * @ param user the user to check the date * @ param resource the resource to check the date * @ return the date when the resource was last visited by the user * @ throws CmsException if something goes wrong */ public long getDateLastVisitedBy ( CmsDbContext dbc , String poolName , CmsUser user , CmsResource resource ) throws CmsException { } }
// Pure delegation : the subscription driver stores and reads visit timestamps .
return m_subscriptionDriver . getDateLastVisitedBy ( dbc , poolName , user , resource ) ;
public class authenticationradiusaction { /** * Use this API to fetch all the authenticationradiusaction resources that are configured on netscaler . */ public static authenticationradiusaction [ ] get ( nitro_service service ) throws Exception { } }
// A freshly constructed resource object carries no filter state, so the
// fetch returns every configured authenticationradiusaction.
authenticationradiusaction resource = new authenticationradiusaction();
return (authenticationradiusaction[]) resource.get_resources(service);
public class DefaultConfigurationReaderInterceptor { /** * Creates a list of { @ link ResourceTypeHandler } s . Subclasses can optionally * override this method and add more handlers to the list . * @ return */ protected List < ResourceTypeHandler < ? > > getResourceTypeHandlers ( ) { } }
// Deliberately a mutable list: subclasses may override and append handlers.
final List<ResourceTypeHandler<?>> result = new ArrayList<ResourceTypeHandler<?>>();
result.add(new FileResourceTypeHandler(getRelativeParentDirectory()));
result.add(new UrlResourceTypeHandler());
result.add(new ClasspathResourceTypeHandler());
result.add(new VfsResourceTypeHandler());
return result;
public class KeywordEstimateRequest { /** * Gets the maxCpc value for this KeywordEstimateRequest . * @ return maxCpc * The max CPC bid for this keyword . * In general , the { @ code maxCpc } of a { @ link KeywordEstimateRequest } * is * optional , since there is usually another { @ code * maxCpc } that can be used , * such as the { @ code maxCpc } on an existing keyword , * an existing or * overriding { @ code maxCpc } of containing { @ link * AdGroupEstimateRequest } . * However , if there is no backup value of { @ code * maxCpc } anywhere along the * line , you must provide a value for { @ code maxCpc } * in * { @ link KeywordEstimateRequest } . This would happen , * for example , if the * { @ link KeywordEstimateRequest } is for a new keyword . */ public com . google . api . ads . adwords . axis . v201809 . cm . Money getMaxCpc ( ) { } }
// Simple accessor for the maxCpc field ; see the class javadoc for when a
// value is required .
return maxCpc ;
public class OrientModelGraph { /** * Tests if a transformation description has an id and adds an unique id if it hasn ' t one . */ private void checkTransformationDescriptionId ( TransformationDescription description ) { } }
if (description.getId() != null) {
    return; // already has an id - nothing to do
}
// Assign a unique internal id from the monotonically increasing counter.
description.setId("EKBInternal-" + counter.incrementAndGet());
public class MutableBkTree { /** * Adds all of the given elements to this tree . * @ param elements elements */ public void addAll ( Iterable < ? extends E > elements ) { } }
// Fail fast with a descriptive message instead of a bare, messageless NPE.
if (elements == null) {
    throw new NullPointerException("elements must not be null");
}
// Insert each element individually; duplicates are handled by add().
for (E element : elements) {
    add(element);
}
public class AbstractCassandraStorage { /** * construct a map to store the mashaller type to cassandra data type mapping */ protected Map < MarshallerType , AbstractType > getDefaultMarshallers ( CfDef cfDef ) throws IOException { } }
// Parse each configured class name into its AbstractType, keeping the same
// parse order as before (comparator, subcomparator, default, key).
Map<MarshallerType, AbstractType> marshallers =
        new EnumMap<MarshallerType, AbstractType>(MarshallerType.class);
AbstractType comparatorType = parseType(cfDef.getComparator_type());
AbstractType subcomparatorType = parseType(cfDef.getSubcomparator_type());
AbstractType defaultValidator = parseType(cfDef.getDefault_validation_class());
AbstractType keyValidator = parseType(cfDef.getKey_validation_class());
marshallers.put(MarshallerType.COMPARATOR, comparatorType);
marshallers.put(MarshallerType.SUBCOMPARATOR, subcomparatorType);
marshallers.put(MarshallerType.DEFAULT_VALIDATOR, defaultValidator);
marshallers.put(MarshallerType.KEY_VALIDATOR, keyValidator);
return marshallers;
public class DeIdentifyUtil { /** * Deidentify left . * @ param str the str * @ param size the size * @ return the string * @ since 2.0.0 */ public static String deidentifyLeft ( String str , int size ) { } }
// Clamp the mask length to the string length so we never repeat more mask
// characters than the input can hold; overlay still covers [0, size).
final int maskLength = Math.min(size, str.length());
return StringUtils.overlay(str, StringUtils.repeat('*', maskLength), 0, size);
public class PagedWidget { /** * Removes the specified item from the panel . If the item is currently being displayed , its * interface element will be removed as well . */ public void removeItem ( T item ) { } }
// Remove the item from the backing model (if any) and refresh the current page.
if ( _model == null ) {
return ; // if we have no model , stop here
}
// remove the item from our data model
_model . removeItem ( item ) ;
// force a relayout of this page
displayPage ( _page , true ) ;
public class DefaultParameterEquivalencer { /** * { @ inheritDoc } */ @ Override public SkinnyUUID getUUID ( Parameter param ) throws ResourceDownloadError , IndexingFailure , IOException { } }
// Resolve the namespace ' s resource location , make sure its equivalence
// index is open , then look up the parameter value .
String srl = param . getNamespace ( ) . getResourceLocation ( ) ;
openEquivalence ( srl ) ;
JDBMEquivalenceLookup lookup = openEquivalences . get ( srl ) ;
// NOTE(review): assumes openEquivalence() always leaves an entry for srl in
// openEquivalences ; otherwise lookup would be null here - confirm .
return lookup . lookup ( param . getValue ( ) ) ;
public class SupplementaryMaterial { /** * Gets the value of the attribOrPermissions property . * This accessor method returns a reference to the live list , * not a snapshot . Therefore any modification you make to the * returned list will be present inside the JAXB object . * This is why there is not a < CODE > set < / CODE > method for the attribOrPermissions property . * For example , to add a new item , do as follows : * < pre > * getAttribOrPermissions ( ) . add ( newItem ) ; * < / pre > * Objects of the following type ( s ) are allowed in the list * { @ link Attrib } * { @ link Permissions } */ public java . util . List < Object > getAttribOrPermissions ( ) { } }
// JAXB-generated lazy accessor : create the live backing list on first use .
if ( attribOrPermissions == null ) { attribOrPermissions = new ArrayList < Object > ( ) ; }
return this . attribOrPermissions ;
public class TileBoundingBoxUtils { /** * Get the zoom level of where the web mercator bounding box fits into the * complete world * @ param webMercatorBoundingBox * web mercator bounding box * @ return zoom level */ public static int getZoomLevel ( BoundingBox webMercatorBoundingBox ) { } }
// Full width of the world in web-mercator units.
double worldLength = ProjectionConstants . WEB_MERCATOR_HALF_WORLD_WIDTH * 2 ;
double longitudeDistance = webMercatorBoundingBox . getMaxLongitude ( ) - webMercatorBoundingBox . getMinLongitude ( ) ;
double latitudeDistance = webMercatorBoundingBox . getMaxLatitude ( ) - webMercatorBoundingBox . getMinLatitude ( ) ;
// Guard degenerate (zero or negative) extents so the divisions stay finite.
if ( longitudeDistance <= 0 ) { longitudeDistance = Double . MIN_VALUE ; }
if ( latitudeDistance <= 0 ) { latitudeDistance = Double . MIN_VALUE ; }
// How many tiles of this box ' s size would fit across the world per axis .
int widthTiles = ( int ) ( worldLength / longitudeDistance ) ;
int heightTiles = ( int ) ( worldLength / latitudeDistance ) ;
// Use the more constraining ( smaller ) count , but never fewer than one tile .
int tilesPerSide = Math . min ( widthTiles , heightTiles ) ;
tilesPerSide = Math . max ( tilesPerSide , 1 ) ;
int zoom = zoomFromTilesPerSide ( tilesPerSide ) ;
return zoom ;
public class AttributeService { /** * Sets the value of the given attribute to the given value for the given file . */ public void setAttribute ( File file , String attribute , Object value , boolean create ) { } }
String view = getViewName ( attribute ) ; String attr = getSingleAttribute ( attribute ) ; setAttributeInternal ( file , view , attr , value , create ) ;
public class HttpInboundLink { /** * @ see com . ibm . wsspi . channelfw . base . InboundProtocolLink # destroy ( java . lang . Exception ) */ @ Override public void destroy ( Exception e ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Destroying inbound link: " + this + " " + getVirtualConnection ( ) ) ; }
// if this object is not active , then just return out
// The flag is flipped under the lock so only one caller performs teardown.
synchronized ( this ) { if ( ! this . bIsActive ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Ignoring destroy on an inactive object" ) ; } return ; } this . bIsActive = false ; }
// 291714 - clean up the statemap
getVirtualConnection ( ) . getStateMap ( ) . remove ( CallbackIDs . CALLBACK_HTTPICL ) ;
if ( getChannel ( ) . getHttpConfig ( ) . runningOnZOS ( ) ) {
// 363633 - remove the buffer size value if present
getVirtualConnection ( ) . getStateMap ( ) . remove ( HttpConstants . HTTPReadBufferSize ) ; }
// now clean out any other app connlinks we may have picked up
if ( null != this . appSides ) {
// the super . destroy without an exception just nulls out values
// the list of appside connlinks includes the current one
super . destroy ( ) ; for ( ConnectionReadyCallback appside : this . appSides ) { appside . destroy ( e ) ; } this . appSides = null ; } else {
// if we only ever got one connlink above , then call the standard
// destroy to pass the sequence along
super . destroy ( e ) ; }
this . myInterface . clear ( ) ;
this . myInterface . destroy ( ) ;
// these are no longer pooled , dereference now
this . myInterface = null ; this . myTSC = null ; this . filterExceptions = false ; this . numRequestsProcessed = 0 ; this . myChannel = null ;
public class BigtableAsyncTable { /** * { @ inheritDoc } */ @ Override public CompletableFuture < Void > mutateRow ( RowMutations rowMutations ) { } }
// Adapt the HBase RowMutations into the Bigtable request model , issue the
// async mutation , and bridge the client future to a CompletableFuture .
return toCompletableFuture ( clientWrapper . mutateRowAsync ( hbaseAdapter . adapt ( rowMutations ) ) ) ;
public class ConfigurationSourceKey { /** * < p > setTypeIfNotDefault . < / p > * @ param defType a { @ link org . configureme . sources . ConfigurationSourceKey . Type } object . * @ param toChange a { @ link org . configureme . sources . ConfigurationSourceKey . Type } object . */ public void setTypeIfNotDefault ( final Type defType , final Type toChange ) { } }
// NOTE(review): despite the parameter names , this assigns defType whenever
// defType is not FILE and falls back to toChange when it is - confirm FILE
// is the " default " type this method is meant to guard against .
this . type = defType != Type . FILE ? defType : toChange ;
public class CPRuleAssetCategoryRelLocalServiceBaseImpl { /** * Returns a range of all the cp rule asset category rels . * Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link com . liferay . portal . kernel . dao . orm . QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link com . liferay . portal . kernel . dao . orm . QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link com . liferay . commerce . product . model . impl . CPRuleAssetCategoryRelModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order . * @ param start the lower bound of the range of cp rule asset category rels * @ param end the upper bound of the range of cp rule asset category rels ( not inclusive ) * @ return the range of cp rule asset category rels */ @ Override public List < CPRuleAssetCategoryRel > getCPRuleAssetCategoryRels ( int start , int end ) { } }
// Pure delegation to the persistence layer ' s paginated findAll .
return cpRuleAssetCategoryRelPersistence . findAll ( start , end ) ;
public class Pnky { /** * See { @ link # allFailingFast ( Iterable ) } */ @ SafeVarargs public static < V > PnkyPromise < List < V > > allFailingFast ( final PnkyPromise < ? extends V > ... promises ) { } }
// Varargs convenience : copy the promises into a list and delegate to the
// Iterable overload .
return allFailingFast ( Lists . newArrayList ( promises ) ) ;
public class AbstractGenericType { /** * This method resolves the given { @ code typeVariable } in the context of the given { @ code declaringType } . * @ param typeVariable the { @ link TypeVariable } to resolve . * @ param declaringType the { @ link GenericType } where the given { @ code typeVariable } occurs or is replaced . * @ return the resolved { @ link Type } or { @ code null } if the given { @ code typeVariable } could NOT be resolved * ( e . g . it was { @ link TypeVariable # getGenericDeclaration ( ) declared } in a { @ link Class } that is NOT * { @ link Class # isAssignableFrom ( Class ) assignable from } the given { @ code declaringType } ) . */ protected Type resolveTypeVariable ( TypeVariable < ? > typeVariable , GenericType < ? > declaringType ) { } }
GenericDeclaration declaration = typeVariable.getGenericDeclaration();
if (!(declaration instanceof Class<?>)) {
    // Declared on a method or constructor - not resolvable from the
    // declaring type alone.
    return null;
}
return resolveTypeVariable(typeVariable, declaringType, (Class<?>) declaration);
public class AppServiceCertificateOrdersInner { /** * Retrieve email history . * Retrieve email history . * @ param resourceGroupName Name of the resource group to which the resource belongs . * @ param name Name of the certificate order . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the List & lt ; CertificateEmailInner & gt ; object */ public Observable < List < CertificateEmailInner > > retrieveCertificateEmailHistoryAsync ( String resourceGroupName , String name ) { } }
// Fire the service call and unwrap the ServiceResponse body for callers .
return retrieveCertificateEmailHistoryWithServiceResponseAsync ( resourceGroupName , name ) . map ( new Func1 < ServiceResponse < List < CertificateEmailInner > > , List < CertificateEmailInner > > ( ) {
@ Override public List < CertificateEmailInner > call ( ServiceResponse < List < CertificateEmailInner > > response ) { return response . body ( ) ; }
} ) ;
public class WorkspaceQuotaRestore { /** * Backups data to define file . */ protected void doBackup ( File backupFile ) throws BackupException { } }
ZipObjectWriter out = null ;
try {
// Open a zip stream over the backup file via the privileged file helper .
out = new ZipObjectWriter ( PrivilegedFileHelper . zipOutputStream ( backupFile ) ) ;
quotaPersister . backupWorkspaceData ( rName , wsName , out ) ;
} catch ( IOException e ) {
throw new BackupException ( e ) ;
} finally {
// Best-effort close : a close failure is logged rather than masking the
// original outcome .
if ( out != null ) { try { out . close ( ) ; } catch ( IOException e ) { LOG . error ( "Can't close output stream" , e ) ; } }
}
public class InverseDepsAnalyzer { /** * Returns all paths reachable from the given targets . */ private Set < Deque < Archive > > findPaths ( Graph < Archive > graph , Archive target ) { } }
// path is in reversed order Deque < Archive > path = new LinkedList < > ( ) ; path . push ( target ) ; Set < Edge < Archive > > visited = new HashSet < > ( ) ; Deque < Edge < Archive > > deque = new LinkedList < > ( ) ; deque . addAll ( graph . edgesFrom ( target ) ) ; if ( deque . isEmpty ( ) ) { return makePaths ( path ) . collect ( Collectors . toSet ( ) ) ; } Set < Deque < Archive > > allPaths = new HashSet < > ( ) ; while ( ! deque . isEmpty ( ) ) { Edge < Archive > edge = deque . pop ( ) ; if ( visited . contains ( edge ) ) continue ; Archive node = edge . v ; path . addLast ( node ) ; visited . add ( edge ) ; Set < Edge < Archive > > unvisitedDeps = graph . edgesFrom ( node ) . stream ( ) . filter ( e -> ! visited . contains ( e ) ) . collect ( Collectors . toSet ( ) ) ; trace ( "visiting %s %s (%s)%n" , edge , path , unvisitedDeps ) ; if ( unvisitedDeps . isEmpty ( ) ) { makePaths ( path ) . forEach ( allPaths :: add ) ; path . removeLast ( ) ; } // push unvisited adjacent edges unvisitedDeps . stream ( ) . forEach ( deque :: push ) ; // when the adjacent edges of a node are visited , pop it from the path while ( ! path . isEmpty ( ) ) { if ( visited . containsAll ( graph . edgesFrom ( path . peekLast ( ) ) ) ) path . removeLast ( ) ; else break ; } } return allPaths ;
public class ConfigStoreUtils { /** * Shortlist topics from config store based on whitelist / blacklist tags and * add it to { @ param whitelist } / { @ param blacklist } * If tags are not provided , blacklist and whitelist won ' t be modified */ public static void setTopicsFromConfigStore ( Properties properties , Set < String > blacklist , Set < String > whitelist , final String _blacklistTopicKey , final String _whitelistTopicKey ) { } }
Optional < String > configStoreUri = getConfigStoreUri ( properties ) ;
// No config store configured : leave both sets untouched .
if ( ! configStoreUri . isPresent ( ) ) { return ; }
ConfigClient configClient = ConfigClient . createConfigClient ( VersionStabilityPolicy . WEAK_LOCAL_STABILITY ) ;
Optional < Config > runtimeConfig = ConfigClientUtils . getOptionalRuntimeConfig ( properties ) ;
// Whitelist tags take precedence ; at most one of the two branches runs .
if ( properties . containsKey ( GOBBLIN_CONFIG_TAGS_WHITELIST ) ) { Preconditions . checkArgument ( properties . containsKey ( GOBBLIN_CONFIG_FILTER ) , "Missing required property " + GOBBLIN_CONFIG_FILTER ) ; String filterString = properties . getProperty ( GOBBLIN_CONFIG_FILTER ) ; Path whiteListTagUri = PathUtils . mergePaths ( new Path ( configStoreUri . get ( ) ) , new Path ( properties . getProperty ( GOBBLIN_CONFIG_TAGS_WHITELIST ) ) ) ;
// Keep only topics whose config enables the whitelist key .
getTopicsURIFromConfigStore ( configClient , whiteListTagUri , filterString , runtimeConfig ) . stream ( ) . filter ( ( URI u ) -> ConfigUtils . getBoolean ( getConfig ( configClient , u , runtimeConfig ) , _whitelistTopicKey , false ) ) . forEach ( ( ( URI u ) -> whitelist . add ( getTopicNameFromURI ( u ) ) ) ) ; } else if ( properties . containsKey ( GOBBLIN_CONFIG_TAGS_BLACKLIST ) ) { Preconditions . checkArgument ( properties . containsKey ( GOBBLIN_CONFIG_FILTER ) , "Missing required property " + GOBBLIN_CONFIG_FILTER ) ; String filterString = properties . getProperty ( GOBBLIN_CONFIG_FILTER ) ; Path blackListTagUri = PathUtils . mergePaths ( new Path ( configStoreUri . get ( ) ) , new Path ( properties . getProperty ( GOBBLIN_CONFIG_TAGS_BLACKLIST ) ) ) ;
// Mirror of the whitelist branch , targeting the blacklist set / key .
getTopicsURIFromConfigStore ( configClient , blackListTagUri , filterString , runtimeConfig ) . stream ( ) . filter ( ( URI u ) -> ConfigUtils . getBoolean ( getConfig ( configClient , u , runtimeConfig ) , _blacklistTopicKey , false ) ) . forEach ( ( ( URI u ) -> blacklist . add ( getTopicNameFromURI ( u ) ) ) ) ; } else { log . warn ( "None of the blacklist or whitelist tags are provided" ) ; }
public class DatabaseDAODefaultImpl { public void unregisterService ( Database database , String serviceName , String instanceName , String deviceName ) throws DevFailed { } }
String [ ] services = new String [ 0 ] ; // Get service property DbDatum data = get_property ( database , TangoConst . CONTROL_SYSTEM , TangoConst . SERVICE_PROP_NAME ) ; if ( ! data . is_empty ( ) ) services = data . extractStringArray ( ) ; // Build what to be remove and searched String target = serviceName + "/" + instanceName ; target = target . toLowerCase ( ) ; // Search if already exists boolean exists = false ; Vector < String > v = new Vector < String > ( ) ; for ( String service : services ) { String line = service . toLowerCase ( ) ; int idx = line . indexOf ( ':' ) ; if ( idx > 0 ) line = line . substring ( 0 , idx ) ; if ( line . equals ( target ) ) // Found exists = true ; else v . add ( service ) ; } if ( exists ) { // Copy vector to String array services = new String [ v . size ( ) ] ; for ( int i = 0 ; i < v . size ( ) ; i ++ ) services [ i ] = v . get ( i ) ; // And finally put property data = new DbDatum ( TangoConst . SERVICE_PROP_NAME ) ; data . insert ( services ) ; put_property ( database , TangoConst . CONTROL_SYSTEM , new DbDatum [ ] { data } ) ; }
public class FreemarkerTemplate { /** * 包装Freemarker模板 * @ param beetlTemplate Beetl的模板对象 { @ link freemarker . template . Template } * @ return { @ link FreemarkerTemplate } */ public static FreemarkerTemplate wrap ( freemarker . template . Template beetlTemplate ) { } }
return ( null == beetlTemplate ) ? null : new FreemarkerTemplate ( beetlTemplate ) ;
public class CmsSecurityManager { /** * Returns a set of users that are responsible for a specific resource . < p > * @ param context the current request context * @ param resource the resource to get the responsible users from * @ return the set of users that are responsible for a specific resource * @ throws CmsException if something goes wrong */ public Set < I_CmsPrincipal > readResponsiblePrincipals ( CmsRequestContext context , CmsResource resource ) throws CmsException { } }
Set < I_CmsPrincipal > result = null ; CmsDbContext dbc = m_dbContextFactory . getDbContext ( context ) ; try { result = m_driverManager . readResponsiblePrincipals ( dbc , resource ) ; } catch ( Exception e ) { dbc . report ( null , Messages . get ( ) . container ( Messages . ERR_READ_RESPONSIBLE_USERS_1 , resource . getRootPath ( ) ) , e ) ; } finally { dbc . clear ( ) ; } return result ;
public class VirtualMachinesInner { /** * The operation to create or update a virtual machine . * @ param resourceGroupName The name of the resource group . * @ param vmName The name of the virtual machine . * @ param parameters Parameters supplied to the Create Virtual Machine operation . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < VirtualMachineInner > beginCreateOrUpdateAsync ( String resourceGroupName , String vmName , VirtualMachineInner parameters , final ServiceCallback < VirtualMachineInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( beginCreateOrUpdateWithServiceResponseAsync ( resourceGroupName , vmName , parameters ) , serviceCallback ) ;
public class CmsShellCommands { /** * Rebuilds ( if required creates ) all configured search indexes . < p > * @ throws Exception if something goes wrong * @ see org . opencms . search . CmsSearchManager # rebuildAllIndexes ( org . opencms . report . I _ CmsReport ) */ public void rebuildAllIndexes ( ) throws Exception { } }
I_CmsReport report = new CmsShellReport ( m_cms . getRequestContext ( ) . getLocale ( ) ) ; OpenCms . getSearchManager ( ) . rebuildAllIndexes ( report ) ;
public class VersionableWorkspaceDataManager { /** * { @ inheritDoc } */ @ Override public List < NodeData > getChildNodesData ( final NodeData nodeData ) throws RepositoryException { } }
if ( ! this . equals ( versionDataManager ) && isSystemDescendant ( nodeData . getQPath ( ) ) ) { return versionDataManager . getChildNodesData ( nodeData ) ; } return super . getChildNodesData ( nodeData ) ;
public class DefaultProcessDiagramGenerator { /** * This method returns shape type of base element . < br > * Each element can be presented as rectangle , rhombus , or ellipse . * @ param baseElement * @ return DefaultProcessDiagramCanvas . SHAPE _ TYPE */ protected static DefaultProcessDiagramCanvas . SHAPE_TYPE getShapeType ( BaseElement baseElement ) { } }
if ( baseElement instanceof Task || baseElement instanceof Activity || baseElement instanceof TextAnnotation ) { return DefaultProcessDiagramCanvas . SHAPE_TYPE . Rectangle ; } else if ( baseElement instanceof Gateway ) { return DefaultProcessDiagramCanvas . SHAPE_TYPE . Rhombus ; } else if ( baseElement instanceof Event ) { return DefaultProcessDiagramCanvas . SHAPE_TYPE . Ellipse ; } // unknown source element , just do not correct coordinates return null ;
public class FacebookRestClient { /** * Sends a message via SMS to the user identified by < code > userId < / code > , with * the expectation that the user will reply . The SMS extended permission is required for success . * The returned mobile session ID can be stored and used in { @ link # sms _ sendResponse } when * the user replies . * @ param userId a user ID * @ param message the message to be sent via SMS * @ return a mobile session ID ( can be used in { @ link # sms _ sendResponse } ) * @ throws FacebookException in case of error , e . g . SMS is not enabled * @ throws IOException * @ see FacebookExtendedPerm # SMS * @ see < a href = " http : / / wiki . developers . facebook . com / index . php / Mobile # Application _ generated _ messages " > * Developers Wiki : Mobile : Application Generated Messages < / a > * @ see < a href = " http : / / wiki . developers . facebook . com / index . php / Mobile # Workflow " > * Developers Wiki : Mobile : Workflow < / a > */ public int sms_sendMessageWithSession ( Integer userId , CharSequence message ) throws FacebookException , IOException { } }
return extractInt ( this . callMethod ( FacebookMethod . SMS_SEND_MESSAGE , new Pair < String , CharSequence > ( "uid" , userId . toString ( ) ) , new Pair < String , CharSequence > ( "message" , message ) , new Pair < String , CharSequence > ( "req_session" , "1" ) ) ) ;
public class JDBCStorageConnection {

    /**
     * Gets an item data from database by its parent and name.
     *
     * @param parent
     *          - parent QPath
     * @param parentId
     *          - parent container internal id (depends on Multi/Single DB)
     * @param name
     *          - item name
     * @param itemType
     *          - item type; {@code ItemType.UNKNOWN} matches any class, otherwise the row's
     *          class column must equal the type's ordinal
     * @return - ItemData instance, or {@code null} if no matching row exists
     * @throws RepositoryException
     *           Repository error (wraps SQL and IO failures)
     * @throws IllegalStateException
     *           if connection is closed
     */
    protected ItemData getItemByName(NodeData parent, String parentId, QPathEntry name, ItemType itemType)
        throws RepositoryException, IllegalStateException {
        // Fail fast if this connection has already been closed.
        checkIfOpened();
        try {
            ResultSet item = null;
            try {
                item = findItemByName(parentId, name.getAsString(), name.getIndex());
                // Scan rows until one matches the requested item type; same-name siblings of a
                // different class (node vs. property) are skipped.
                while (item.next()) {
                    int columnClass = item.getInt(COLUMN_CLASS);
                    if (itemType == ItemType.UNKNOWN || columnClass == itemType.ordinal()) {
                        return itemData(parent.getQPath(), item, columnClass, parent.getACL());
                    }
                }
                return null;
            } finally {
                // Close the ResultSet in all cases; a close failure is only logged so it does not
                // mask the primary result/exception.
                try {
                    if (item != null) {
                        item.close();
                    }
                } catch (SQLException e) {
                    LOG.error("Can't close the ResultSet: " + e.getMessage());
                }
            }
        } catch (SQLException e) {
            throw new RepositoryException(e);
        } catch (IOException e) {
            throw new RepositoryException(e);
        }
    }
}
public class JSError { /** * Creates a JSError from a file and Node position . * @ param n Determines the line and char position and source file name * @ param type The DiagnosticType * @ param arguments Arguments to be incorporated into the message */ public static JSError make ( Node n , DiagnosticType type , String ... arguments ) { } }
return new JSError ( n . getSourceFileName ( ) , n , type , arguments ) ;
public class StripeTextUtils { /** * Converts a card number that may have spaces between the numbers into one without any spaces . * Note : method does not check that all characters are digits or spaces . * @ param cardNumberWithSpaces a card number , for instance " 4242 4242 4242 4242" * @ return the input number minus any spaces , for instance " 4242424242424242 " . * Returns { @ code null } if the input was { @ code null } or all spaces . */ @ Nullable public static String removeSpacesAndHyphens ( @ Nullable String cardNumberWithSpaces ) { } }
if ( isBlank ( cardNumberWithSpaces ) ) { return null ; } return cardNumberWithSpaces . replaceAll ( "\\s|-" , "" ) ;
public class SftpSubsystemChannel {

    /**
     * Perform a synchronous read of a file from the remote file system. This implementation waits
     * for acknowledgement of every data packet before requesting additional data.
     *
     * @param handle the open remote file handle
     * @param blocksize requested read block size in bytes; values outside [1, 32768] are clamped
     *          to 32768 (32K is a safe maximum for many SFTP servers)
     * @param out destination stream for the file contents
     * @param progress optional progress/cancellation callback, may be {@code null}
     * @param position starting byte offset within the remote file; must not be negative
     * @throws SftpStatusException
     * @throws SshException if the position is negative or writing to {@code out} fails
     * @throws TransferCancelledException if {@code progress} reports cancellation
     */
    public void performSynchronousRead(byte[] handle, int blocksize, OutputStream out,
            FileTransferProgress progress, long position) throws SftpStatusException, SshException,
            TransferCancelledException {
        if (Log.isDebugEnabled()) {
            Log.debug(this, "Performing synchronous read postion=" + position + " blocksize=" + blocksize);
        }
        // Clamp out-of-range block sizes (both too small and too large) to 32K.
        if (blocksize < 1 || blocksize > 32768) {
            if (Log.isDebugEnabled()) {
                Log.debug(this, "Blocksize to large for some SFTP servers, reseting to 32K");
            }
            blocksize = 32768;
        }
        if (position < 0) {
            throw new SshException("Position value must be greater than zero!",
                    SshException.BAD_API_USAGE);
        }
        byte[] tmp = new byte[blocksize];
        int read;
        UnsignedInteger64 offset = new UnsignedInteger64(position);
        // When resuming mid-file, report the already-transferred prefix up front.
        if (position > 0) {
            if (progress != null)
                progress.progressed(position);
        }
        try {
            // Read sequentially until EOF; readFile returning -1 (or below) ends the loop.
            // NOTE(review): a zero-length read keeps looping — presumably readFile only returns 0
            // transiently; confirm against its contract.
            while ((read = readFile(handle, offset, tmp, 0, tmp.length)) > -1) {
                // Honor cancellation between packets.
                if (progress != null && progress.isCancelled()) {
                    throw new TransferCancelledException();
                }
                out.write(tmp, 0, read);
                offset = UnsignedInteger64.add(offset, read);
                if (progress != null)
                    progress.progressed(offset.longValue());
            }
        } catch (IOException e) {
            // Local write failures are surfaced as SSH errors.
            throw new SshException(e);
        }
    }
}
public class CmsCategoryService { /** * Deletes the category identified by the given path . < p > * Only the most global category matching the given category path for the * given resource will be affected . < p > * This method will try to lock the involved resource . < p > * @ param cms the current cms context * @ param categoryPath the path of the category to delete * @ param referencePath the reference path to find the category repositories * @ throws CmsException if something goes wrong */ public void deleteCategory ( CmsObject cms , String categoryPath , String referencePath ) throws CmsException { } }
CmsCategory category = readCategory ( cms , categoryPath , referencePath ) ; String folderPath = cms . getRequestContext ( ) . removeSiteRoot ( category . getRootPath ( ) ) ; CmsLock lock = cms . getLock ( folderPath ) ; if ( lock . isNullLock ( ) ) { cms . lockResource ( folderPath ) ; } else if ( lock . isLockableBy ( cms . getRequestContext ( ) . getCurrentUser ( ) ) ) { cms . changeLock ( folderPath ) ; } cms . deleteResource ( folderPath , CmsResource . DELETE_PRESERVE_SIBLINGS ) ;
public class ContentSpecParser {

    /**
     * Processes and creates a level based on the level type.
     *
     * @param parserData the parsing state accumulated so far (levels, targets, ...).
     * @param lineNumber The line number the level is on.
     * @param levelType The type the level will represent. ie. A Chapter or Appendix
     * @param line The chapter string in the content specification.
     * @return The created level or null if an error occurred.
     * @throws ParsingException Thrown if the line can't be parsed as a Level, due to incorrect syntax.
     */
    protected Level parseLevel(final ParserData parserData, final int lineNumber, final LevelType levelType,
            final String line) throws ParsingException {
        // Split into "<type>: <title and variables>" (at most two parts).
        String splitVars[] = StringUtilities.split(line, ':', 2);
        // Remove the whitespace from each value in the split array
        splitVars = CollectionUtilities.trimStringArray(splitVars);

        // Create the level based on the type
        final Level newLvl = createEmptyLevelFromType(lineNumber, levelType, line);
        newLvl.setUniqueId("L" + lineNumber);

        // Parse the input (anything after the "<type>:" prefix)
        if (splitVars.length >= 2) {
            final String unescapedTitle = getTitle(splitVars[1], '[');
            final String title = ProcessorUtilities.replaceEscapeChars(unescapedTitle);
            if (!isNullOrEmpty(title)) {
                newLvl.setTitle(title);
            }

            // Get the mapping of variables ("[...]"-delimited, comma separated)
            final HashMap<ParserType, List<String[]>> variableMap = getLineVariables(parserData, splitVars[1],
                    lineNumber, '[', ']', ',', false, true);
            if (variableMap.containsKey(ParserType.NONE)) {
                boolean optionsProcessed = false;
                for (final String[] variables : variableMap.get(ParserType.NONE)) {
                    if (variables.length >= 1) {
                        if (variables[0].matches(CSConstants.ALL_TOPIC_ID_REGEX)) {
                            // A topic id means the level declares "initial content"; reuse the level
                            // itself when it already is an InitialContent container, otherwise create
                            // one, register it and attach it as a child.
                            final InitialContent initialContent;
                            if (newLvl instanceof InitialContent) {
                                initialContent = (InitialContent) newLvl;
                            } else {
                                initialContent = new InitialContent(lineNumber, "");
                                initialContent.setUniqueId("L" + lineNumber + "-1");
                                parserData.getLevels().put(initialContent.getUniqueId(), initialContent);
                                newLvl.appendChild(initialContent);
                            }

                            // Rebuild a topic line from the title + variables and parse it as a topic.
                            final String topicString = unescapedTitle + " [" + StringUtilities.buildString(variables,
                                    ", ") + "]";
                            final SpecTopic initialContentTopic = parseTopic(parserData, topicString, lineNumber);
                            initialContent.appendSpecTopic(initialContentTopic);
                        } else {
                            // Process the options; only one options group is allowed per level.
                            if (!optionsProcessed) {
                                addOptions(parserData, newLvl, variables, 0, line, lineNumber);
                                optionsProcessed = true;
                            } else {
                                throw new ParsingException(
                                        format(ProcessorConstants.ERROR_DUPLICATED_RELATIONSHIP_TYPE_MSG,
                                                lineNumber, line));
                            }
                        }
                    }
                }
            }

            // Flatten the variable map since we've gotten the useful information
            // (only the first value list per parser type is needed below).
            final HashMap<ParserType, String[]> flattenedVariableMap = new HashMap<ParserType, String[]>();
            for (final Map.Entry<ParserType, List<String[]>> lineVariable : variableMap.entrySet()) {
                flattenedVariableMap.put(lineVariable.getKey(), lineVariable.getValue().get(0));
            }

            // Add targets for the level, rejecting target ids already used by a topic or another level.
            if (variableMap.containsKey(ParserType.TARGET)) {
                final String[] targets = flattenedVariableMap.get(ParserType.TARGET);
                final String targetId = targets[0];
                if (parserData.getTargetTopics().containsKey(targetId)) {
                    throw new ParsingException(format(ProcessorConstants.ERROR_DUPLICATE_TARGET_ID_MSG,
                            parserData.getTargetTopics().get(targetId).getLineNumber(),
                            parserData.getTargetTopics().get(targetId).getText(), lineNumber, line));
                } else if (parserData.getTargetLevels().containsKey(targetId)) {
                    throw new ParsingException(format(ProcessorConstants.ERROR_DUPLICATE_TARGET_ID_MSG,
                            parserData.getTargetLevels().get(targetId).getLineNumber(),
                            parserData.getTargetLevels().get(targetId).getText(), lineNumber, line));
                } else {
                    parserData.getTargetLevels().put(targetId, newLvl);
                    newLvl.setTargetId(targetId);
                }
            }

            // Add the info topic for the level
            if (variableMap.containsKey(ParserType.INFO)) {
                final String[] infoVariables = flattenedVariableMap.get(ParserType.INFO);
                final InfoTopic infoTopic = new InfoTopic(newLvl);
                // Add the attributes to the topic
                addTopicAttributes(infoTopic, parserData, infoVariables, lineNumber, line);
                newLvl.setInfoTopic(infoTopic);
            }

            // Check for external targets (disabled legacy handling, kept for reference):
            // if (variableMap.containsKey(ParserType.EXTERNAL_TARGET)) {
            //     final String externalTargetId = variableMap.get(ParserType.EXTERNAL_TARGET)[0];
            //     getExternalTargetLevels().put(externalTargetId, newLvl);
            //     newLvl.setExternalTargetId(externalTargetId);
            // }
            // // Check if the level is injecting data from another content spec
            // if (variableMap.containsKey(ParserType.EXTERNAL_CONTENT_SPEC)) {
            //     processExternalLevel(newLvl, variableMap.get(ParserType.EXTERNAL_CONTENT_SPEC)[0], title, line);
            // }

            // Process any relationship content that might have been defined, only if an initial
            // content topic has been defined.
            if (variableMap.containsKey(ParserType.REFER_TO) || variableMap.containsKey(ParserType.PREREQUISITE)
                    || variableMap.containsKey(ParserType.NEXT) || variableMap.containsKey(ParserType.PREVIOUS)
                    || variableMap.containsKey(ParserType.LINKLIST)) {
                // Check that no relationships were specified for the level itself
                if (newLvl instanceof InitialContent) {
                    processLevelRelationships(parserData, newLvl, flattenedVariableMap, line, lineNumber);
                } else if (newLvl.getChildLevels().isEmpty()) {
                    throw new ParsingException(format(ProcessorConstants.ERROR_LEVEL_RELATIONSHIP_MSG, lineNumber,
                            levelType.getTitle(), levelType.getTitle(), line));
                } else {
                    // Relationships attach to the first child (the InitialContent created above).
                    processLevelRelationships(parserData, (Level) newLvl.getChildLevels().get(0),
                            flattenedVariableMap, line, lineNumber);
                }
            }
        }
        return newLvl;
    }
}
public class ApiOvhIp { /** * Add a new backend on your IP load balancing * REST : POST / ip / loadBalancing / { serviceName } / backend * @ param weight [ required ] Weight of the backend on its zone , must be between 1 and 100 * @ param probe [ required ] The type of probe used * @ param ipBackend [ required ] IP of your backend * @ param serviceName [ required ] The internal name of your IP load balancing */ public OvhLoadBalancingTask loadBalancing_serviceName_backend_POST ( String serviceName , String ipBackend , OvhLoadBalancingBackendProbeEnum probe , Long weight ) throws IOException { } }
String qPath = "/ip/loadBalancing/{serviceName}/backend" ; StringBuilder sb = path ( qPath , serviceName ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "ipBackend" , ipBackend ) ; addBody ( o , "probe" , probe ) ; addBody ( o , "weight" , weight ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhLoadBalancingTask . class ) ;
public class IOGroovyMethods {

    /**
     * Overloads the leftShift operator to provide an append mechanism to add bytes to a stream.
     *
     * @param self an OutputStream
     * @param value a value to append
     * @return an OutputStream (the same stream, to allow chaining)
     * @throws IOException if an I/O error occurs.
     * @since 1.0
     */
    public static OutputStream leftShift(OutputStream self, byte[] value) throws IOException {
        // Append the bytes, then flush so the data is pushed through immediately.
        self.write(value, 0, value.length);
        self.flush();
        return self;
    }
}
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcFurnishingElement ( ) { } }
if ( ifcFurnishingElementEClass == null ) { ifcFurnishingElementEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 296 ) ; } return ifcFurnishingElementEClass ;
public class ContentPanePainter { /** * DOCUMENT ME ! * @ param type DOCUMENT ME ! * @ return DOCUMENT ME ! */ private TwoColors getRootPaneInteriorColors ( Which type ) { } }
switch ( type ) { case BACKGROUND_ENABLED_WINDOWFOCUSED : return rootPaneActive ; case BACKGROUND_ENABLED : return rootPaneInactive ; } return null ;
public class SegmentSlic { /** * initialize all the clusters at regularly spaced intervals . Their locations are perturbed a bit to reduce * the likelihood of a bad location . Initial color is set to the image color at the location */ protected void initializeClusters ( ) { } }
int offsetX = Math . max ( BORDER , ( ( input . width - 1 ) % gridInterval ) / 2 ) ; int offsetY = Math . max ( BORDER , ( ( input . height - 1 ) % gridInterval ) / 2 ) ; int clusterId = 0 ; clusters . reset ( ) ; for ( int y = offsetY ; y < input . height - BORDER ; y += gridInterval ) { for ( int x = offsetX ; x < input . width - BORDER ; x += gridInterval ) { Cluster c = clusters . grow ( ) ; c . id = clusterId ++ ; if ( c . color == null ) c . color = new float [ numBands ] ; // sets the location and color at the local minimal gradient point perturbCenter ( c , x , y ) ; } }
public class ClusterManagerMetrics { /** * Get all the possible end states ( non running ) of the session * @ return The list of session status that are the non - running state */ private static List < SessionStatus > getEndStates ( ) { } }
List < SessionStatus > endStatesRet = new ArrayList < SessionStatus > ( ) ; for ( SessionStatus s : SessionStatus . values ( ) ) { if ( s != SessionStatus . RUNNING ) { endStatesRet . add ( s ) ; } } return endStatesRet ;
public class EvolvingImages {

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        java.awt.GridBagConstraints gridBagConstraints;

        // Component instantiation (NetBeans generated).
        imagePanel = new javax.swing.JPanel();
        imageSplitPane = new javax.swing.JSplitPane();
        origImagePanel = new javax.swing.JPanel();
        polygonImagePanel = new javax.swing.JPanel();
        buttonPanel = new javax.swing.JPanel();
        startButton = new javax.swing.JButton();
        stopButton = new javax.swing.JButton();
        openButton = new javax.swing.JButton();
        pauseButton = new javax.swing.JButton();
        saveButton = new javax.swing.JButton();
        resultPanel = new javax.swing.JPanel();
        bestEvolutionResultPanel = new io.jenetics.example.image.EvolutionResultPanel();
        currentevolutionResultPanel = new io.jenetics.example.image.EvolutionResultPanel();
        engineParamPanel = new io.jenetics.example.image.EngineParamPanel();

        setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
        setTitle("Evolving images");

        // Split pane showing the source image (left) and the evolved polygon image (right).
        imagePanel.setBackground(new java.awt.Color(153, 153, 153));
        imagePanel.setLayout(new java.awt.GridLayout(1, 1));

        imageSplitPane.setDividerLocation(300);

        origImagePanel.setBorder(javax.swing.BorderFactory.createTitledBorder("Source image"));
        origImagePanel.setName(""); // NOI18N
        origImagePanel.addComponentListener(new java.awt.event.ComponentAdapter() {
            public void componentResized(java.awt.event.ComponentEvent evt) {
                origImagePanelComponentResized(evt);
            }
        });
        origImagePanel.setLayout(new java.awt.BorderLayout());
        imageSplitPane.setLeftComponent(origImagePanel);

        polygonImagePanel.setBorder(javax.swing.BorderFactory.createTitledBorder("Polygon image"));
        polygonImagePanel.setLayout(new java.awt.GridLayout(1, 1));
        imageSplitPane.setRightComponent(polygonImagePanel);

        imagePanel.add(imageSplitPane);

        // Control buttons and their action listeners.
        startButton.setText("Start");
        startButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                startButtonActionPerformed(evt);
            }
        });

        stopButton.setText("Stop");
        stopButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                stopButtonActionPerformed(evt);
            }
        });

        openButton.setText("Open");
        openButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                openButtonActionPerformed(evt);
            }
        });

        pauseButton.setText("Pause");
        pauseButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                pauseButtonActionPerformed(evt);
            }
        });

        saveButton.setText("Save");
        saveButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                saveButtonActionPerformed(evt);
            }
        });

        // Button panel layout.
        javax.swing.GroupLayout buttonPanelLayout = new javax.swing.GroupLayout(buttonPanel);
        buttonPanel.setLayout(buttonPanelLayout);
        buttonPanelLayout.setHorizontalGroup(
            buttonPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(buttonPanelLayout.createSequentialGroup()
                .addContainerGap()
                .addGroup(buttonPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(startButton, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addComponent(stopButton, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addComponent(openButton, javax.swing.GroupLayout.DEFAULT_SIZE, 119, Short.MAX_VALUE)
                    .addComponent(pauseButton, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addComponent(saveButton, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
                .addContainerGap())
        );
        buttonPanelLayout.setVerticalGroup(
            buttonPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(buttonPanelLayout.createSequentialGroup()
                .addContainerGap()
                .addComponent(startButton)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(stopButton)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(pauseButton)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 196, Short.MAX_VALUE)
                .addComponent(openButton)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(saveButton)
                .addContainerGap())
        );

        // Result panels ("Best" / "Current") plus engine parameters, laid out with GridBag.
        resultPanel.setLayout(new java.awt.GridBagLayout());

        bestEvolutionResultPanel.setBorder(javax.swing.BorderFactory.createTitledBorder("Best"));
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
        resultPanel.add(bestEvolutionResultPanel, gridBagConstraints);

        currentevolutionResultPanel.setBorder(javax.swing.BorderFactory.createTitledBorder("Current"));
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
        gridBagConstraints.weightx = 1.0;
        resultPanel.add(currentevolutionResultPanel, gridBagConstraints);

        engineParamPanel.setBorder(javax.swing.BorderFactory.createTitledBorder("Engine parameter"));
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 1;
        gridBagConstraints.gridwidth = 2;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        resultPanel.add(engineParamPanel, gridBagConstraints);

        // Top-level frame layout.
        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(resultPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 788, Short.MAX_VALUE)
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(imagePanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                        .addComponent(buttonPanel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))
                .addContainerGap())
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(buttonPanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addComponent(imagePanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(resultPanel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addContainerGap())
        );

        pack();
    }// </editor-fold>//GEN-END:initComponents
}
public class TriggersInner { /** * Get a specific trigger by name . * @ param deviceName The device name . * @ param name The trigger name . * @ param resourceGroupName The resource group name . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the TriggerInner object */ public Observable < TriggerInner > getAsync ( String deviceName , String name , String resourceGroupName ) { } }
return getWithServiceResponseAsync ( deviceName , name , resourceGroupName ) . map ( new Func1 < ServiceResponse < TriggerInner > , TriggerInner > ( ) { @ Override public TriggerInner call ( ServiceResponse < TriggerInner > response ) { return response . body ( ) ; } } ) ;
public class EmbedVaadinConfig {

    /**
     * Cleans the specified context path. Adds a forward slash if one
     * is missing and makes sure to interpret both the empty string and
     * '/' as the root context.
     *
     * @param contextPath the context path to clean
     * @return the context path to use (empty string for the root context)
     */
    static String cleanContextPath(String contextPath) {
        // Special handling so that "/" (and blanks) can be used for the root context as well.
        final boolean isRoot = contextPath.equals("/") || contextPath.trim().equals("");
        if (isRoot) {
            return "";
        }
        return contextPath.startsWith("/") ? contextPath : "/" + contextPath;
    }
}
public class IntHashMap { /** * Returns the value to which this map maps the specified key . Returns * < tt > null < / tt > if the map contains no mapping for this key . A return * value of < tt > null < / tt > does not < i > necessarily < / i > indicate that the * map contains no mapping for the key ; it ' s also possible that the map * explicitly maps the key to < tt > null < / tt > . The < tt > containsKey < / tt > * operation may be used to distinguish these two cases . * @ return the value to which this map maps the specified key . * @ param key key whose associated value is to be returned . */ public V get ( int key ) { } }
Entry < V > tab [ ] = table ; int index = ( key & 0x7fffffff ) % tab . length ; for ( Entry < V > e = tab [ index ] ; e != null ; e = e . next ) { if ( e . key == key ) { return e . value ; } } return null ;
public class NodeUtil { /** * Whether a simple name is referenced within the node tree . */ static boolean isNameReferenced ( Node node , String name , Predicate < Node > traverseChildrenPred ) { } }
return has ( node , new MatchNameNode ( name ) , traverseChildrenPred ) ;