signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class RetryPolicy { /** * Resets the internal counter . This should be called whenever a new record is added , and whenever all * { @ link ShareRequest } completed successfully . * @ param context the { @ link Context } . */ static void reset ( Context context ) { } }
Context appContext = context . getApplicationContext ( ) ; String key = appContext . getString ( R . string . wings__retry_policy_consecutive_fails ) ; // Synchronously reset consecutive fail count . SharedPreferences preferences = PreferenceManager . getDefaultSharedPreferences ( appContext ) ; Editor editor = preferences . edit ( ) ; editor . putInt ( key , 0 ) ; editor . commit ( ) ;
public class JFinalConfigExt { /** * = = = = = Override */ @ Override public String getProperty ( String key ) { } }
String p = super . getProperty ( key ) ; if ( StrKit . isBlank ( p ) ) { new IllegalArgumentException ( "`" + key + "` Cannot be empty, set `" + key + "` in " + cfg + " file" ) ; } return p ;
public class XStringForFSB { /** * Returns a new string that is a substring of this string . The * substring begins with the character at the specified index and * extends to the end of this string . < p > * Examples : * < blockquote > < pre > * " unhappy " . substring ( 2 ) returns " happy " * " Harbison " . substring ( 3 ) returns " bison " * " emptiness " . substring ( 9 ) returns " " ( an empty string ) * < / pre > < / blockquote > * @ param beginIndex the beginning index , inclusive . * @ return the specified substring . * @ exception IndexOutOfBoundsException if * < code > beginIndex < / code > is negative or larger than the * length of this < code > String < / code > object . */ public XMLString substring ( int beginIndex ) { } }
int len = m_length - beginIndex ; if ( len <= 0 ) return XString . EMPTYSTRING ; else { int start = m_start + beginIndex ; return new XStringForFSB ( fsb ( ) , start , len ) ; }
public class DiscussionCommentResourcesImpl { /** * Update the specified comment * It mirrors to the following Smartsheet REST API method : PUT / sheets / { sheetId } / comments / { commentId } * @ param sheetId the sheet id * @ param comment the new comment object * @ return the updated comment * @ throws IllegalArgumentException if any argument is null or empty string * @ throws InvalidRequestException if there is any problem with the REST API request * @ throws AuthorizationException if there is any problem with the REST API authorization ( access token ) * @ throws ResourceNotFoundException if the resource cannot be found * @ throws ServiceUnavailableException if the REST API service is not available ( possibly due to rate limiting ) * @ throws SmartsheetException if there is any other error during the operation */ public Comment updateComment ( long sheetId , Comment comment ) throws SmartsheetException { } }
return this . updateResource ( "sheets/" + sheetId + "/comments/" + comment . getId ( ) , Comment . class , comment ) ;
public class ApiOvhHostingweb { /** * Alter this object properties * REST : PUT / hosting / web / { serviceName } / ownLogs / { id } / userLogs / { login } * @ param body [ required ] New object properties * @ param serviceName [ required ] The internal name of your hosting * @ param id [ required ] Id of the object * @ param login [ required ] The userLogs login used to connect to logs . ovh . net */ public void serviceName_ownLogs_id_userLogs_login_PUT ( String serviceName , Long id , String login , OvhUserLogs body ) throws IOException { } }
String qPath = "/hosting/web/{serviceName}/ownLogs/{id}/userLogs/{login}" ; StringBuilder sb = path ( qPath , serviceName , id , login ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ;
public class SccpFlowControl { /** * IT , DT2 , AK */ public void initializeMessageNumbering ( SccpConnDt2MessageImpl msg ) { } }
sendSequenceNumber = getNextSequenceNumber ( ) ; msg . setSequencing ( sendSequenceNumber , sendSequenceNumberExpectedAtInput ) ; inputWindow . setLowerEdge ( sendSequenceNumberExpectedAtInput ) ;
public class AbstractFsCheckpointStorage { /** * Takes the given string ( representing a pointer to a checkpoint ) and resolves it to a file * status for the checkpoint ' s metadata file . * @ param checkpointPointer The pointer to resolve . * @ return A state handle to checkpoint / savepoint ' s metadata . * @ throws IOException Thrown , if the pointer cannot be resolved , the file system not accessed , or * the pointer points to a location that does not seem to be a checkpoint / savepoint . */ protected static CompletedCheckpointStorageLocation resolveCheckpointPointer ( String checkpointPointer ) throws IOException { } }
checkNotNull ( checkpointPointer , "checkpointPointer" ) ; checkArgument ( ! checkpointPointer . isEmpty ( ) , "empty checkpoint pointer" ) ; // check if the pointer is in fact a valid file path final Path path ; try { path = new Path ( checkpointPointer ) ; } catch ( Exception e ) { throw new IOException ( "Checkpoint/savepoint path '" + checkpointPointer + "' is not a valid file URI. " + "Either the pointer path is invalid, or the checkpoint was created by a different state backend." ) ; } // check if the file system can be accessed final FileSystem fs ; try { fs = path . getFileSystem ( ) ; } catch ( IOException e ) { throw new IOException ( "Cannot access file system for checkpoint/savepoint path '" + checkpointPointer + "'." , e ) ; } final FileStatus status ; try { status = fs . getFileStatus ( path ) ; } catch ( FileNotFoundException e ) { throw new FileNotFoundException ( "Cannot find checkpoint or savepoint " + "file/directory '" + checkpointPointer + "' on file system '" + fs . getUri ( ) . getScheme ( ) + "'." ) ; } // if we are here , the file / directory exists final Path checkpointDir ; final FileStatus metadataFileStatus ; // If this is a directory , we need to find the meta data file if ( status . isDir ( ) ) { checkpointDir = status . getPath ( ) ; final Path metadataFilePath = new Path ( path , METADATA_FILE_NAME ) ; try { metadataFileStatus = fs . getFileStatus ( metadataFilePath ) ; } catch ( FileNotFoundException e ) { throw new FileNotFoundException ( "Cannot find meta data file '" + METADATA_FILE_NAME + "' in directory '" + path + "'. Please try to load the checkpoint/savepoint " + "directly from the metadata file instead of the directory." ) ; } } else { // this points to a file and we either do no name validation , or // the name is actually correct , so we can return the path metadataFileStatus = status ; checkpointDir = status . getPath ( ) . getParent ( ) ; } final FileStateHandle metaDataFileHandle = new FileStateHandle ( metadataFileStatus . getPath ( ) , metadataFileStatus . getLen ( ) ) ; final String pointer = checkpointDir . makeQualified ( fs ) . toString ( ) ; return new FsCompletedCheckpointStorageLocation ( fs , checkpointDir , metaDataFileHandle , pointer ) ;
public class TextComponentUtil { /** * Returns the start of the previous line if that line is only whitespace . Returns - 1 otherwise . */ public static int getWhiteSpaceLineStartBefore ( String script , int start ) { } }
int startLine = getLineStart ( script , start ) ; if ( startLine > 0 ) { int nextLineEnd = startLine - 1 ; int previousLineStart = getLineStart ( script , nextLineEnd ) ; boolean whitespace = GosuStringUtil . isWhitespace ( script . substring ( previousLineStart , nextLineEnd ) ) ; if ( whitespace ) { return previousLineStart ; } } return - 1 ;
public class OAuth2ConnectionFactory { /** * Create a OAuth2 - based { @ link Connection } from the connection data . * @ param data connection data from which to create the connection */ public Connection < S > createConnection ( ConnectionData data ) { } }
return new OAuth2Connection < S > ( data , getOAuth2ServiceProvider ( ) , getApiAdapter ( ) ) ;
public class DeferredAttr { /** * Routine that performs speculative type - checking ; the input AST node is * cloned ( to avoid side - effects cause by Attr ) and compiler state is * restored after type - checking . All diagnostics ( but critical ones ) are * disabled during speculative type - checking . */ JCTree attribSpeculative ( JCTree tree , Env < AttrContext > env , ResultInfo resultInfo ) { } }
return attribSpeculative ( tree , env , resultInfo , treeCopier , ( newTree ) -> new DeferredAttrDiagHandler ( log , newTree ) , null ) ;
public class MtasFieldsProducer { /** * Adds the index input to list . * @ param name the name * @ param in the in * @ param postingsFormatName the postings format name * @ return the string * @ throws IOException Signals that an I / O exception has occurred . */ private String addIndexInputToList ( String name , IndexInput in , String postingsFormatName ) throws IOException { } }
if ( indexInputList . get ( name ) != null ) { indexInputList . get ( name ) . close ( ) ; } if ( in != null ) { String localPostingsFormatName = postingsFormatName ; if ( localPostingsFormatName == null ) { localPostingsFormatName = in . readString ( ) ; } else if ( ! in . readString ( ) . equals ( localPostingsFormatName ) ) { throw new IOException ( "delegate codec " + name + " doesn't equal " + localPostingsFormatName ) ; } indexInputList . put ( name , in ) ; indexInputOffsetList . put ( name , in . getFilePointer ( ) ) ; return localPostingsFormatName ; } else { log . debug ( "no " + name + " registered" ) ; return null ; }
public class ServerService { /** * Create snapshot of a single server or group of servers * @ param expirationDays expiration days ( must be between 1 and 10) * @ param serverFilter search servers criteria * @ return OperationFuture wrapper for BaseServerResponse list */ public OperationFuture < List < Server > > createSnapshot ( Integer expirationDays , ServerFilter serverFilter ) { } }
List < Server > serverList = findServers ( serverFilter ) ; return powerOperationResponse ( serverList , "Create Snapshot" , client . createSnapshot ( new CreateSnapshotRequest ( ) . snapshotExpirationDays ( expirationDays ) . serverIds ( ids ( serverList ) ) ) ) ;
public class AsyncFacebookRunner { /** * Make a request to the Facebook Graph API without any parameters . * See http : / / developers . facebook . com / docs / api * Note that this method is asynchronous and the callback will be invoked * in a background thread ; operations that affect the UI will need to be * posted to the UI thread or an appropriate handler . * This method is deprecated . See { @ link Facebook } and { @ link com . facebook . Request } for more info . * @ param graphPath * Path to resource in the Facebook graph , e . g . , to fetch data * about the currently logged authenticated user , provide " me " , * which will fetch http : / / graph . facebook . com / me * @ param listener * Callback interface to notify the application when the request * has completed . * @ param state * An arbitrary object used to identify the request when it * returns to the callback . This has no effect on the request * itself . */ @ Deprecated public void request ( String graphPath , RequestListener listener , final Object state ) { } }
request ( graphPath , new Bundle ( ) , "GET" , listener , state ) ;
public class PinyinUtil { /** * 转换List < Pinyin > pinyinList到List < String > , 其中的String为带声调符号形式 * @ param pinyinList * @ return */ public static List < String > convertPinyinList2TonePinyinList ( List < Pinyin > pinyinList ) { } }
List < String > tonePinyinList = new ArrayList < String > ( pinyinList . size ( ) ) ; for ( Pinyin pinyin : pinyinList ) { tonePinyinList . add ( pinyin . getPinyinWithToneMark ( ) ) ; } return tonePinyinList ;
public class Logger { /** * Issue a log message with a level of INFO using { @ link java . text . MessageFormat } - style formatting . * @ param t the throwable * @ param format the message format string * @ param params the parameters */ public void infov ( Throwable t , String format , Object ... params ) { } }
doLog ( Level . INFO , FQCN , format , params , t ) ;
public class RsXembly { /** * Render source as XML . * @ param dom DOM node to build upon * @ param src Source * @ return XML * @ throws IOException If fails */ private static InputStream render ( final Node dom , final XeSource src ) throws IOException { } }
final Node copy = cloneNode ( dom ) ; final ByteArrayOutputStream baos = new ByteArrayOutputStream ( ) ; final Node node = new Xembler ( src . toXembly ( ) ) . applyQuietly ( copy ) ; try { TransformerFactory . newInstance ( ) . newTransformer ( ) . transform ( new DOMSource ( node ) , new StreamResult ( new Utf8OutputStreamContent ( baos ) ) ) ; } catch ( final TransformerException ex ) { throw new IllegalStateException ( ex ) ; } return new ByteArrayInputStream ( baos . toByteArray ( ) ) ;
public class MetaDataService { /** * Retrieve series list of the specified metric * @ param metricName metric name * @ param entityName entity name ' s filter * @ return list of series */ public List < Series > retrieveMetricSeries ( String metricName , String entityName ) { } }
return retrieveMetricSeries ( metricName , Collections . singletonMap ( "entity" , entityName ) ) ;
public class TrainingsImpl { /** * Associate a set of images with a set of tags . * @ param projectId The project id * @ param createImageTagsOptionalParameter the object representing the optional parameters to be set before calling this API * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < ImageTagCreateSummary > createImageTagsAsync ( UUID projectId , CreateImageTagsOptionalParameter createImageTagsOptionalParameter , final ServiceCallback < ImageTagCreateSummary > serviceCallback ) { } }
return ServiceFuture . fromResponse ( createImageTagsWithServiceResponseAsync ( projectId , createImageTagsOptionalParameter ) , serviceCallback ) ;
public class ListCommandInvocationsResult { /** * ( Optional ) A list of all invocations . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setCommandInvocations ( java . util . Collection ) } or { @ link # withCommandInvocations ( java . util . Collection ) } if * you want to override the existing values . * @ param commandInvocations * ( Optional ) A list of all invocations . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListCommandInvocationsResult withCommandInvocations ( CommandInvocation ... commandInvocations ) { } }
if ( this . commandInvocations == null ) { setCommandInvocations ( new com . amazonaws . internal . SdkInternalList < CommandInvocation > ( commandInvocations . length ) ) ; } for ( CommandInvocation ele : commandInvocations ) { this . commandInvocations . add ( ele ) ; } return this ;
public class StringUtils { /** * Concatenate the given String arrays into one , with overlapping array * elements included twice . * The order of elements in the original arrays is preserved . * @ param array1 the first array ( can be < code > null < / code > ) * @ param array2 the second array ( can be < code > null < / code > ) * @ return the new array ( < code > null < / code > if both given arrays were * < code > null < / code > ) */ public static String [ ] concatenateStringArrays ( String [ ] array1 , String [ ] array2 ) { } }
if ( isEmpty ( array1 ) ) { return array2 ; } if ( isEmpty ( array2 ) ) { return array1 ; } String [ ] newArr = new String [ array1 . length + array2 . length ] ; System . arraycopy ( array1 , 0 , newArr , 0 , array1 . length ) ; System . arraycopy ( array2 , 0 , newArr , array1 . length , array2 . length ) ; return newArr ;
public class DatabaseContentReader { /** * / * in case of failover , use init ( ) instead of initialize ( ) for retry */ private void init ( ) throws IOException , InterruptedException { } }
nakedDone = false ; // initialize the total length float recToFragRatio = conf . getFloat ( RECORD_TO_FRAGMENT_RATIO , getDefaultRatio ( ) ) ; length = mlSplit . getLength ( ) * recToFragRatio ; // generate the query String queryText ; long start = mlSplit . getStart ( ) + 1 + count ; long end = mlSplit . isLastSplit ( ) ? Long . MAX_VALUE : start + mlSplit . getLength ( ) - count - 1 ; String src = conf . get ( MarkLogicConstants . DOCUMENT_SELECTOR ) ; redactionRuleCol = conf . getStrings ( REDACTION_RULE_COLLECTION ) ; Collection < String > nsCol = null ; if ( src != null ) { nsCol = conf . getStringCollection ( MarkLogicConstants . PATH_NAMESPACE ) ; } else { src = "fn:collection()" ; } ctsQuery = conf . get ( MarkLogicConstants . QUERY_FILTER ) ; StringBuilder buf = new StringBuilder ( ) ; if ( ctsQuery != null ) { buildSearchQuery ( src , ctsQuery , nsCol , buf ) ; } else { buildDocExprQuery ( src , nsCol , null , buf ) ; } src = buf . toString ( ) ; buf = new StringBuilder ( ) ; buf . append ( "xquery version \"1.0-ml\"; \n" ) ; buf . append ( "import module namespace hadoop = " ) ; buf . append ( "\"http://marklogic.com/xdmp/hadoop\" at " ) ; buf . append ( "\"/MarkLogic/hadoop.xqy\";\n" ) ; if ( redactionRuleCol != null ) { buf . append ( "import module namespace rdt = \"http://marklogic.com/xdmp/redaction\" at \"/MarkLogic/redaction.xqy\";\n" ) ; } buf . append ( "declare namespace mlmr=\"http://marklogic.com/hadoop\";\n" ) ; buf . append ( "declare option xdmp:output \"indent=no\";\n" ) ; buf . append ( "declare option xdmp:output \"indent-untyped=no\";\n" ) ; buf . append ( "declare variable $mlmr:splitstart as xs:integer external;\n" ) ; buf . append ( "declare variable $mlmr:splitend as xs:integer external;\n" ) ; buf . append ( "let $cols := " ) ; buf . append ( src ) ; buf . append ( "\nlet $all-meta :=" ) ; buf . append ( "\nfor $doc in $cols" ) ; buf . append ( "\nlet $uri := fn:base-uri($doc)\n return (" ) ; buf . append ( "'META'," ) ; buf . append ( "$uri," ) ; buf . append ( "if(fn:empty($doc/node())) then 0 " ) ; buf . append ( "else if (fn:count($doc/node())>1) then \"element\" " ) ; buf . append ( "else xdmp:node-kind($doc/node())" ) ; if ( copyCollection || copyPermission || copyProperties || copyQuality ) { buf . append ( "," ) ; if ( copyCollection ) { buf . append ( "xdmp:document-get-collections($uri),\n" ) ; } if ( copyPermission ) { buf . append ( "let $list := xdmp:document-get-permissions($uri)\n" ) ; buf . append ( "return hadoop:get-permissions($list)," ) ; } // if copy - quality , else + 0 if ( copyQuality ) { buf . append ( "xdmp:document-get-quality($uri),\n" ) ; } else { buf . append ( "0," ) ; } // if copy - metadata if ( copyMetadata ) { buf . append ( "(let $f := fn:function-lookup(xs:QName('xdmp:document-get-metadata'),1)\n" + "return if (exists($f)) then $f($uri) else ()),\n" ) ; } // if copy - properties , else + ( ) , \ n if ( copyProperties ) { buf . append ( "xdmp:document-properties($uri)/prop:properties,\n" ) ; } } else { buf . append ( ",0," ) ; // quality buf . append ( "(),\n" ) ; // properties } // end - of - record marker buf . append ( "0" ) ; buf . append ( " )\n" ) ; buf . append ( "return ($all-meta" ) ; buf . append ( ",'EOM',$cols)" ) ; queryText = buf . toString ( ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( queryText ) ; } // set up a connection to the server while ( retry < maxRetries ) { try { if ( retry == 1 ) { LOG . info ( "Retrying connect" ) ; } String curForestName = "" ; String curHostName = "" ; if ( curForest == - 1 ) { curForestName = mlSplit . getForestId ( ) . toString ( ) ; curHostName = hostNames [ 0 ] ; } else { curForestName = replicas . get ( curForest ) . getForest ( ) ; curHostName = replicas . get ( curForest ) . getHostName ( ) ; } ContentSource cs = InternalUtilities . getInputContentSource ( conf , curHostName ) ; session = cs . newSession ( "#" + curForestName ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Connect to forest " + curForestName + " on " + session . getConnectionUri ( ) . getHost ( ) ) ; } AdhocQuery aquery = session . newAdhocQuery ( queryText ) ; aquery . setNewIntegerVariable ( MR_NAMESPACE , SPLIT_START_VARNAME , start ) ; aquery . setNewIntegerVariable ( MR_NAMESPACE , SPLIT_END_VARNAME , end ) ; RequestOptions options = new RequestOptions ( ) ; options . setCacheResult ( false ) ; String ts = conf . get ( INPUT_QUERY_TIMESTAMP ) ; if ( ts != null ) { options . setEffectivePointInTime ( new BigInteger ( ts ) ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Query timestamp: " + ts ) ; } } aquery . setOptions ( options ) ; result = session . submitRequest ( aquery ) ; initMetadataMap ( ) ; } catch ( XccConfigException e ) { LOG . error ( "XccConfigException:" + e ) ; throw new IOException ( e ) ; } catch ( QueryException e ) { LOG . error ( "QueryException:" + e ) ; LOG . debug ( "Query: " + queryText ) ; throw new IOException ( e ) ; } catch ( Exception e ) { LOG . error ( "Exception:" + e . getMessage ( ) ) ; if ( curForest != - 1 ) { if ( ++ retry < maxRetries ) { // failover try { Thread . sleep ( sleepTime ) ; } catch ( Exception e2 ) { } sleepTime = Math . min ( sleepTime * 2 , maxSleepTime ) ; curForest = ( curForest + 1 ) % replicas . size ( ) ; continue ; } LOG . info ( "Retry limit exceeded" ) ; } throw new IOException ( e ) ; } break ; }
public class LTieSrtConsumerBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */ @ Nonnull public static < T > LTieSrtConsumer < T > tieSrtConsumerFrom ( Consumer < LTieSrtConsumerBuilder < T > > buildingFunction ) { } }
LTieSrtConsumerBuilder builder = new LTieSrtConsumerBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ;
public class ControlCreateDurableImpl { /** * / * ( non - Javadoc ) * @ see com . ibm . ws . sib . mfp . control . ControlCreateDurable # isNoLocal ( ) */ public final boolean isNoLocal ( ) { } }
boolean result = false ; // if the value is unset then it has come from an environment with back level schema if ( jmo . getChoiceField ( ControlAccess . BODY_CREATEDURABLE_NOLOCAL ) != ControlAccess . IS_BODY_CREATEDURABLE_NOLOCAL_UNSET ) { result = jmo . getBooleanField ( ControlAccess . BODY_CREATEDURABLE_NOLOCAL_VALUE ) ; } return result ;
public class RiakIndex { /** * Return the values in this index . * The returned { @ code Set } is unmodifiable . * @ return an unmodifiable view of the values in this index . */ public final Set < T > values ( ) { } }
Set < T > convertedValues = new HashSet < > ( ) ; for ( BinaryValue baw : values ) { convertedValues . add ( convert ( baw ) ) ; } return Collections . unmodifiableSet ( convertedValues ) ;
public class VertexDescription { /** * Checks if the given value is the default one . The simple equality test * with GetDefaultValue does not work due to the use of NaNs as default * value for some parameters . */ public static boolean isDefaultValue ( int semantics , double v ) { } }
return NumberUtils . doubleToInt64Bits ( _defaultValues [ semantics ] ) == NumberUtils . doubleToInt64Bits ( v ) ;
public class JobsModule { /** * Allows { @ code Job } scheduling , delegating Guice create the { @ code Job } instance * and inject members . * If given { @ code Job } class is annotated with { @ link Scheduled } , then { @ code Job } * and related { @ code Trigger } values will be extracted from it . * @ param jobClass The { @ code Job } has to be scheduled * @ return The { @ code Job } builder or null if the job class may not be registered */ protected final JobSchedulerBuilder scheduleJob ( Class < ? extends Job > jobClass ) { } }
checkNotNull ( jobClass , "Argument 'jobClass' must be not null." ) ; if ( ! RequireUtil . allowClass ( getSettings ( ) , jobClass ) ) { return null ; } JobSchedulerBuilder builder = new JobSchedulerBuilder ( jobClass ) ; if ( jobClass . isAnnotationPresent ( Scheduled . class ) ) { Scheduled scheduled = jobClass . getAnnotation ( Scheduled . class ) ; builder // job . withJobName ( scheduled . jobName ( ) ) . withJobGroup ( scheduled . jobGroup ( ) ) . withRequestRecovery ( scheduled . requestRecovery ( ) ) . withStoreDurably ( scheduled . storeDurably ( ) ) // trigger . withCronExpression ( scheduled . cronExpression ( ) ) . withTriggerName ( scheduled . triggerName ( ) ) ; if ( ! Scheduled . DEFAULT . equals ( scheduled . timeZoneId ( ) ) ) { TimeZone timeZone = TimeZone . getTimeZone ( scheduled . timeZoneId ( ) ) ; if ( timeZone != null ) { builder . withTimeZone ( timeZone ) ; } } } requestInjection ( builder ) ; return builder ;
public class ConnectionManagerImpl { /** * Call rollback on the underlying connection . */ public void localRollback ( ) { } }
log . info ( "Rollback was called, do rollback on current connection " + con ) ; if ( ! this . isInLocalTransaction ) { throw new PersistenceBrokerException ( "Not in transaction, cannot abort" ) ; } try { // truncate the local transaction this . isInLocalTransaction = false ; if ( ! broker . isManaged ( ) ) { if ( batchCon != null ) { batchCon . rollback ( ) ; } else if ( con != null && ! con . isClosed ( ) ) { con . rollback ( ) ; } } else { if ( log . isEnabledFor ( Logger . INFO ) ) log . info ( "Found managed environment setting in PB, will ignore rollback call on connection, this should be done by JTA" ) ; } } catch ( SQLException e ) { log . error ( "Rollback on the underlying connection failed" , e ) ; } finally { try { restoreAutoCommitState ( ) ; } catch ( OJBRuntimeException ignore ) { // Ignore or log exception } releaseConnection ( ) ; }
public class SimpleFormatter { /** * / * [ deutsch ] * < p > Konstruiert einen Formatierer f & uuml ; r einfache Zeitstempelobjekte . < / p > * @ param dateStyle format style for the date component * @ param timeStyle format style for the time component * @ param locale format locale * @ return new { @ code SimpleFormatter } - instance * @ throws IllegalStateException if no localized format pattern can be retrieved * @ since 5.0 */ public static SimpleFormatter < PlainTimestamp > ofTimestampStyle ( DisplayMode dateStyle , DisplayMode timeStyle , Locale locale ) { } }
DateFormat df = DateFormat . getDateTimeInstance ( dateStyle . getStyleValue ( ) , timeStyle . getStyleValue ( ) , locale ) ; String pattern = FormatUtils . removeZones ( getFormatPattern ( df ) ) ; return SimpleFormatter . ofTimestampPattern ( pattern , locale ) ;
public class Task { /** * Registers the executor type . DO NOT USE THIS METHOD UNLESS YOU ARE * DEVELOPING A NEW TASK EXECUTOR . * @ param executionType * the executor type */ public final void registerExecutionType ( ExecutionType executionType ) { } }
if ( executionType == null ) { throw new TaskExeception ( ExecutionType . class . getSimpleName ( ) + " NULL" ) ; } this . executionType = executionType ;
public class DateContext { /** * Check if all values in the given data are valid and that the result will * be a valid date . * @ param month The month ( 1-12) * @ param day The day ( 1-31) * @ param year The year ( 4 - digit ) * @ param hour The hour ( 0-23) * @ param minute The minutes ( 0-59) * @ param second The seconds ( 0-59) * @ param millisecond The milliseconds ( 0-999) * @ return < code > true < / code > if all data represents a valid date , * < code > false < / code > otherwise */ public boolean isValidDate ( int month , int day , int year , int hour , int minute , int second , int millisecond ) { } }
boolean valid = true ; if ( month < 1 || month > 12 ) { valid = false ; } else if ( validDay ( month , day , year ) == false ) { valid = false ; } else if ( hour < 0 || hour > 23 ) { valid = false ; } else if ( minute < 0 || minute > 59 ) { valid = false ; } else if ( second < 0 || second > 59 ) { valid = false ; } else if ( millisecond < 0 || millisecond > 999 ) { valid = false ; } return valid ;
public class FastSet { /** * { @ inheritDoc } */ @ Override public boolean removeAll ( IntSet c ) { } }
if ( c == null || c . isEmpty ( ) || isEmpty ( ) ) { return false ; } if ( c == this ) { clear ( ) ; return true ; } final FastSet other = convert ( c ) ; final int [ ] localWords = words ; // faster final int [ ] localOtherWords = other . words ; // faster // Perform logical ( a & ! b ) on words in common boolean modified = false ; for ( int i = Math . min ( firstEmptyWord , other . firstEmptyWord ) - 1 ; i >= 0 ; i -- ) { int before = localWords [ i ] ; localWords [ i ] &= ~ localOtherWords [ i ] ; modified = modified || before != localWords [ i ] ; } if ( modified ) { fixFirstEmptyWord ( ) ; size = - 1 ; } return modified ;
public class EscapeDirective { /** * TODO 挪到 StrKit 中 */ private String escape ( String str ) { } }
if ( str == null || str . length ( ) == 0 ) { return str ; } int len = str . length ( ) ; StringBuilder ret = new StringBuilder ( len * 2 ) ; for ( int i = 0 ; i < len ; i ++ ) { char cur = str . charAt ( i ) ; switch ( cur ) { case '<' : ret . append ( "&lt;" ) ; break ; case '>' : ret . append ( "&gt;" ) ; break ; case '"' : ret . append ( "&quot;" ) ; break ; case '\'' : // ret . append ( " & apos ; " ) ; / / IE 不支持 & apos ; 考虑 & # 39; ret . append ( "&#39;" ) ; break ; case '&' : ret . append ( "&amp;" ) ; break ; default : ret . append ( cur ) ; break ; } } return ret . toString ( ) ;
public class CombinationManagementPermission { /** * Adds a permission . * This method should not be called after the instance has been made visible to another thread * than the one that constructed it . * @ param permissionName name of the permission to add . Cannot be { @ code null } * @ param underlyingPermission the permission . Cannot be { @ code null } */ public void addUnderlyingPermission ( String permissionName , ManagementPermission underlyingPermission ) { } }
assert underlyingPermission . getActionEffect ( ) == getActionEffect ( ) : "incompatible ActionEffect" ; if ( combinationPolicy == CombinationPolicy . REJECTING && underlyingPermissions . size ( ) > 0 ) { throw ControllerLogger . ROOT_LOGGER . illegalMultipleRoles ( ) ; } underlyingPermissions . put ( permissionName , underlyingPermission ) ;
public class AsynchronousRequest { /** * For more info on achievements categories API go < a href = " https : / / wiki . guildwars2 . com / wiki / API : 2 / achievements / categories " > here < / a > < br / > * Give user the access to { @ link Callback # onResponse ( Call , Response ) } and { @ link Callback # onFailure ( Call , Throwable ) } methods for custom interactions * @ param ids list of achievement category id ( s ) * @ param callback callback that is going to be used for { @ link Call # enqueue ( Callback ) } * @ throws GuildWars2Exception invalid API key * @ throws NullPointerException if given { @ link Callback } is empty * @ see AchievementCategory achievement category info */ public void getAchievementCategoryInfo ( int [ ] ids , Callback < List < AchievementCategory > > callback ) throws GuildWars2Exception , NullPointerException { } }
isParamValid ( new ParamChecker ( ids ) ) ; gw2API . getAchievementCategoryInfo ( processIds ( ids ) , GuildWars2 . lang . getValue ( ) ) . enqueue ( callback ) ;
public class MemoryFileManager { /** * Returns a { @ linkplain JavaFileObject file object } for input * representing the specified class of the specified kind in the * given location . * @ param location a location * @ param className the name of a class * @ param kind the kind of file , must be one of { @ link * JavaFileObject . Kind # SOURCE SOURCE } or { @ link * JavaFileObject . Kind # CLASS CLASS } * @ return a file object , might return { @ code null } if the * file does not exist * @ throws IllegalArgumentException if the location is not known * to this file manager and the file manager does not support * unknown locations , or if the kind is not valid * @ throws IOException if an I / O error occurred , or if { @ link * # close } has been called and this file manager cannot be * reopened * @ throws IllegalStateException if { @ link # close } has been called * and this file manager cannot be reopened */ @ Override public JavaFileObject getJavaFileForInput ( JavaFileManager . Location location , String className , JavaFileObject . Kind kind ) throws IOException { } }
return stdFileManager . getJavaFileForInput ( location , className , kind ) ;
public class Predicates { /** * Returns a predicate that evaluates to { @ code true } if each of its * components evaluates to { @ code true } . The components are evaluated in * order , and evaluation will be " short - circuited " as soon as a false * predicate is found . It defensively copies the array passed in , so future * changes to it won ' t alter the behavior of this predicate . If { @ code * components } is empty , the returned predicate will always evaluate to { @ code * true } . * @ param components the components * @ return a predicate */ public static < T > Predicate < T > and ( Predicate < ? super T > ... components ) { } }
return new AndPredicate < T > ( ImmutableList . of ( components ) ) ;
public class GenderRatioProcessor { /** * Helper method that extracts the list of all { @ link ItemIdValue } objects * that are used as values in the given statement group . * @ param statementGroup * the { @ link StatementGroup } to extract the data from * @ return the list of values */ private List < EntityIdValue > getItemIdValueList ( StatementGroup statementGroup ) { } }
List < EntityIdValue > result = new ArrayList < > ( statementGroup . size ( ) ) ; for ( Statement s : statementGroup ) { Value v = s . getValue ( ) ; if ( v instanceof EntityIdValue ) { result . add ( ( EntityIdValue ) v ) ; } } return result ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link TextType } * { @ code > } */ @ XmlElementDecl ( namespace = "http://www.w3.org/2005/Atom" , name = "subtitle" , scope = SourceType . class ) public JAXBElement < TextType > createSourceTypeSubtitle ( TextType value ) { } }
return new JAXBElement < TextType > ( FEED_TYPE_SUBTITLE_QNAME , TextType . class , SourceType . class , value ) ;
public class AbstractSupervisedProjectionVectorFilter { /** * Get the output type from the input type after conversion . * @ param in input type restriction * @ param factory Vector factory * @ return output type restriction */ protected SimpleTypeInformation < ? > convertedType ( SimpleTypeInformation < ? > in , NumberVector . Factory < V > factory ) { } }
return new VectorFieldTypeInformation < > ( factory , tdim ) ;
public class DialogPreference { /** * Obtains the left and right margin of the divider , which is located above the buttons of the * dialog , which shown by the preference , from a specific typed array . * @ param typedArray * The typed array , the margin should be obtained from , as an instance of the class * { @ link TypedArray } . The typed array may not be null */ private void obtainDialogButtonBarDividerMargin ( @ NonNull final TypedArray typedArray ) { } }
setDialogDividerMargin ( typedArray . getDimensionPixelSize ( R . styleable . DialogPreference_dialogDividerMargin , 0 ) ) ;
public class AbstractAmazonRDSAsync { /** * Simplified method form for invoking the DescribeDBSnapshots operation with an AsyncHandler . * @ see # describeDBSnapshotsAsync ( DescribeDBSnapshotsRequest , com . amazonaws . handlers . AsyncHandler ) */ @ Override public java . util . concurrent . Future < DescribeDBSnapshotsResult > describeDBSnapshotsAsync ( com . amazonaws . handlers . AsyncHandler < DescribeDBSnapshotsRequest , DescribeDBSnapshotsResult > asyncHandler ) { } }
return describeDBSnapshotsAsync ( new DescribeDBSnapshotsRequest ( ) , asyncHandler ) ;
public class ForkJoinTask { /** * Blocks a non - worker - thread until completion . * @ return status upon completion */ private int externalAwaitDone ( ) { } }
boolean interrupted = false ; int s ; while ( ( s = status ) >= 0 ) { if ( U . compareAndSwapInt ( this , STATUS , s , s | SIGNAL ) ) { synchronized ( this ) { if ( status >= 0 ) { try { wait ( ) ; } catch ( InterruptedException ie ) { interrupted = true ; } } else notifyAll ( ) ; } } } if ( interrupted ) Thread . currentThread ( ) . interrupt ( ) ; return s ;
public class StmtUtil { /** * Issue get - result call to get query result given an in progress response . * @ param queryId id of query to get results for * @ param session the current session * @ return results in JSON * @ throws SFException exception raised from Snowflake components * @ throws SnowflakeSQLException exception raised from Snowflake components */ static protected JsonNode getQueryResultJSON ( String queryId , SFSession session ) throws SFException , SnowflakeSQLException { } }
String getResultPath = String . format ( SF_PATH_QUERY_RESULT , queryId ) ; StmtInput stmtInput = new StmtInput ( ) . setServerUrl ( session . getServerUrl ( ) ) . setSessionToken ( session . getSessionToken ( ) ) . setNetworkTimeoutInMillis ( session . getNetworkTimeoutInMilli ( ) ) . setMediaType ( SF_MEDIA_TYPE ) . setServiceName ( session . getServiceName ( ) ) ; String resultAsString = getQueryResult ( getResultPath , stmtInput ) ; StmtOutput stmtOutput = pollForOutput ( resultAsString , stmtInput , null ) ; return stmtOutput . getResult ( ) ;
public class InMemoryCacheEntry { /** * / * ( non - Javadoc ) * @ see com . gistlabs . mechanize . cache . InMemoryCacheEntry # head ( ) */ @ Override public HttpResponse head ( ) { } }
BasicHttpResponse response = new BasicHttpResponse ( this . response . getStatusLine ( ) ) ; Header [ ] allHeaders = this . response . getAllHeaders ( ) ; for ( Header allHeader : allHeaders ) response . addHeader ( allHeader ) ; response . setEntity ( new ByteArrayEntity ( new byte [ ] { } ) ) ; return response ;
public class RuleWrapper { /** * Getter for the used { @ link TextSymbolizerWrapper } . * < p > Currently only one { @ link TextSymbolizer } is supported in editing , so just the first is used . < / p > * @ return the used { @ link TextSymbolizer } . */ public TextSymbolizerWrapper getTextSymbolizersWrapper ( ) { } }
for ( SymbolizerWrapper symbolizerWrapper : symbolizersWrapperList ) { if ( symbolizerWrapper . isTextSymbolizer ( ) ) { return ( TextSymbolizerWrapper ) symbolizerWrapper ; } } return null ;
public class AWSDynamoUtils { /** * Converts a { @ link ParaObject } to DynamoDB row . * @ param < P > type of object * @ param so an object * @ param filter used to filter out fields on update . * @ return a row representation of the given object . */ protected static < P extends ParaObject > Map < String , AttributeValue > toRow ( P so , Class < ? extends Annotation > filter ) { } }
HashMap < String , AttributeValue > row = new HashMap < > ( ) ; if ( so == null ) { return row ; } for ( Map . Entry < String , Object > entry : ParaObjectUtils . getAnnotatedFields ( so , filter ) . entrySet ( ) ) { Object value = entry . getValue ( ) ; if ( value != null && ! StringUtils . isBlank ( value . toString ( ) ) ) { row . put ( entry . getKey ( ) , new AttributeValue ( value . toString ( ) ) ) ; } } if ( so . getVersion ( ) != null && so . getVersion ( ) > 0 ) { row . put ( Config . _VERSION , new AttributeValue ( ) . withN ( so . getVersion ( ) . toString ( ) ) ) ; } else { row . remove ( Config . _VERSION ) ; } return row ;
public class CreateMLModelRequest { /** * A list of the training parameters in the < code > MLModel < / code > . The list is implemented as a map of key - value * pairs . * The following is the current set of training parameters : * < ul > * < li > * < code > sgd . maxMLModelSizeInBytes < / code > - The maximum allowed size of the model . Depending on the input data , the * size of the model might affect its performance . * The value is an integer that ranges from < code > 100000 < / code > to < code > 2147483648 < / code > . The default value is * < code > 33554432 < / code > . * < / li > * < li > * < code > sgd . maxPasses < / code > - The number of times that the training process traverses the observations to build * the < code > MLModel < / code > . The value is an integer that ranges from < code > 1 < / code > to < code > 10000 < / code > . The * default value is < code > 10 < / code > . * < / li > * < li > * < code > sgd . shuffleType < / code > - Whether Amazon ML shuffles the training data . Shuffling the data improves a * model ' s ability to find the optimal solution for a variety of data types . The valid values are < code > auto < / code > * and < code > none < / code > . The default value is < code > none < / code > . We < ? oxy _ insert _ start author = " laurama " * timestamp = " 20160329T131121-0700 " > strongly recommend that you shuffle your data . < ? oxy _ insert _ end > * < / li > * < li > * < code > sgd . l1RegularizationAmount < / code > - The coefficient regularization L1 norm . It controls overfitting the * data by penalizing large coefficients . This tends to drive coefficients to zero , resulting in a sparse feature * set . If you use this parameter , start by specifying a small value , such as < code > 1.0E - 08 < / code > . * The value is a double that ranges from < code > 0 < / code > to < code > MAX _ DOUBLE < / code > . The default is to not use L1 * normalization . This parameter can ' t be used when < code > L2 < / code > is specified . Use this parameter sparingly . * < / li > * < li > * < code > sgd . l2RegularizationAmount < / code > - The coefficient regularization L2 norm . It controls overfitting the * data by penalizing large coefficients . This tends to drive coefficients to small , nonzero values . If you use this * parameter , start by specifying a small value , such as < code > 1.0E - 08 < / code > . * The value is a double that ranges from < code > 0 < / code > to < code > MAX _ DOUBLE < / code > . The default is to not use L2 * normalization . This parameter can ' t be used when < code > L1 < / code > is specified . Use this parameter sparingly . * < / li > * < / ul > * @ param parameters * A list of the training parameters in the < code > MLModel < / code > . The list is implemented as a map of * key - value pairs . < / p > * The following is the current set of training parameters : * < ul > * < li > * < code > sgd . maxMLModelSizeInBytes < / code > - The maximum allowed size of the model . Depending on the input * data , the size of the model might affect its performance . * The value is an integer that ranges from < code > 100000 < / code > to < code > 2147483648 < / code > . The default value * is < code > 33554432 < / code > . * < / li > * < li > * < code > sgd . maxPasses < / code > - The number of times that the training process traverses the observations to * build the < code > MLModel < / code > . The value is an integer that ranges from < code > 1 < / code > to * < code > 10000 < / code > . The default value is < code > 10 < / code > . * < / li > * < li > * < code > sgd . shuffleType < / code > - Whether Amazon ML shuffles the training data . Shuffling the data improves a * model ' s ability to find the optimal solution for a variety of data types . The valid values are * < code > auto < / code > and < code > none < / code > . The default value is < code > none < / code > . We < ? oxy _ insert _ start * author = " laurama " timestamp = " 20160329T131121-0700 " > strongly recommend that you shuffle your * data . < ? oxy _ insert _ end > * < / li > * < li > * < code > sgd . l1RegularizationAmount < / code > - The coefficient regularization L1 norm . It controls overfitting * the data by penalizing large coefficients . This tends to drive coefficients to zero , resulting in a sparse * feature set . If you use this parameter , start by specifying a small value , such as < code > 1.0E - 08 < / code > . * The value is a double that ranges from < code > 0 < / code > to < code > MAX _ DOUBLE < / code > . The default is to not * use L1 normalization . This parameter can ' t be used when < code > L2 < / code > is specified . Use this parameter * sparingly . * < / li > * < li > * < code > sgd . l2RegularizationAmount < / code > - The coefficient regularization L2 norm . It controls overfitting * the data by penalizing large coefficients . This tends to drive coefficients to small , nonzero values . If * you use this parameter , start by specifying a small value , such as < code > 1.0E - 08 < / code > . * The value is a double that ranges from < code > 0 < / code > to < code > MAX _ DOUBLE < / code > . The default is to not * use L2 normalization . This parameter can ' t be used when < code > L1 < / code > is specified . Use this parameter * sparingly . * < / li > * @ return Returns a reference to this object so that method calls can be chained together . */ public CreateMLModelRequest withParameters ( java . util . Map < String , String > parameters ) { } }
setParameters ( parameters ) ; return this ;
public class SymbolTable { /** * This function uses = = to compare types to be exact same instances . */ @ SuppressWarnings ( "ReferenceEquality" ) private void createPropertyScopeFor ( Symbol s ) { } }
// In order to build a property scope for s , we will need to build // a property scope for all its implicit prototypes first . This means // that sometimes we will already have built its property scope // for a previous symbol . if ( s . propertyScope != null ) { return ; } ObjectType type = getType ( s ) == null ? null : getType ( s ) . toObjectType ( ) ; if ( type == null ) { return ; } // Create an empty property scope for the given symbol , maybe with a parent scope if it has // an implicit prototype . SymbolScope parentPropertyScope = maybeGetParentPropertyScope ( type ) ; s . setPropertyScope ( new SymbolScope ( null , parentPropertyScope , type , s ) ) ; // If this symbol represents some ' a . b . c . prototype ' , add any instance properties of a . b . c // into the symbol scope . ObjectType instanceType = type ; Iterable < String > propNames = type . getOwnPropertyNames ( ) ; if ( instanceType . isFunctionPrototypeType ( ) ) { // Guard against modifying foo . prototype when foo is a regular ( non - constructor ) function . if ( instanceType . getOwnerFunction ( ) . hasInstanceType ( ) ) { // Merge the properties of " Foo . prototype " and " new Foo ( ) " together . instanceType = instanceType . getOwnerFunction ( ) . getInstanceType ( ) ; propNames = Iterables . concat ( propNames , instanceType . getOwnPropertyNames ( ) ) ; } } // Add all declared properties in propNames into the property scope for ( String propName : propNames ) { StaticSlot newProp = instanceType . getSlot ( propName ) ; if ( newProp . getDeclaration ( ) == null ) { // Skip properties without declarations . We won ' t know how to index // them , because we index things by node . continue ; } // We have symbol tables that do not do type analysis . They just try // to build a complete index of all objects in the program . So we might // already have symbols for things like " Foo . bar " . If this happens , // throw out the old symbol and use the type - based symbol . Symbol oldProp = symbols . get ( newProp . getDeclaration ( ) . getNode ( ) , s . getName ( ) + "." + propName ) ; // If we ' ve already have an entry in the table for this symbol , // then skip it . This should only happen if we screwed up , // and declared multiple distinct properties with the same name // at the same node . We bail out here to be safe . if ( symbols . get ( newProp . getDeclaration ( ) . getNode ( ) , newProp . getName ( ) ) != null ) { if ( logger . isLoggable ( Level . FINE ) ) { logger . fine ( "Found duplicate symbol " + newProp ) ; } continue ; } Symbol newSym = copySymbolTo ( newProp , s . propertyScope ) ; if ( oldProp != null ) { if ( newSym . getJSDocInfo ( ) == null ) { newSym . setJSDocInfo ( oldProp . getJSDocInfo ( ) ) ; } newSym . setPropertyScope ( oldProp . propertyScope ) ; for ( Reference ref : oldProp . references . values ( ) ) { newSym . defineReferenceAt ( ref . getNode ( ) ) ; } // All references / scopes from oldProp were updated to use the newProp . Time to remove // oldProp . removeSymbol ( oldProp ) ; } }
public class MetaGraphDef { /** * < pre > * SaverDef . * < / pre > * < code > optional . tensorflow . SaverDef saver _ def = 3 ; < / code > */ public org . tensorflow . util . SaverDef getSaverDef ( ) { } }
return saverDef_ == null ? org . tensorflow . util . SaverDef . getDefaultInstance ( ) : saverDef_ ;
public class CmsContainerpageService { /** * Returns the no - edit reason for the given resource . < p > * @ param cms the current cms object * @ param containerPage the resource * @ return the no - edit reason , empty if editing is allowed * @ throws CmsException is something goes wrong */ private String getNoEditReason ( CmsObject cms , CmsResource containerPage ) throws CmsException { } }
return new CmsResourceUtil ( cms , containerPage ) . getNoEditReason ( OpenCms . getWorkplaceManager ( ) . getWorkplaceLocale ( cms ) ) ;
public class CSVFile { /** * Extracts the language of the " master language value " and " value " * column heading in the first line of the trema CSV file . * @ param columnHeading the column heading . The expected format is : * < code > master ( & lt ; lang & gt ; ) < / code > or < code > value ( & lt ; lang & gt ; ) < / code > . * @ return the extracted language * @ throws ParseException if any parse errors occur */ private String extractLanguage ( String columnHeading ) throws ParseException { } }
int start = columnHeading . indexOf ( '(' ) ; int end = columnHeading . indexOf ( ')' ) ; if ( start == - 1 || end == - 1 || start >= end ) { throwWrongHeaderException ( ) ; } return columnHeading . substring ( start + 1 , end ) ;
public class TableBuilder { /** * Set a border on the outline of the whole table , around the first row and draw vertical lines * around each column . * @ param style the style to apply * @ return this , for method chaining */ public TableBuilder addHeaderAndVerticalsBorders ( BorderStyle style ) { } }
this . addBorder ( 0 , 0 , 1 , model . getColumnCount ( ) , OUTLINE , style ) ; this . addBorder ( 0 , 0 , model . getRowCount ( ) , model . getColumnCount ( ) , OUTLINE | INNER_VERTICAL , style ) ; return this ;
public class BMStatefulBeanO { /** * d671368 */ @ Override boolean eligibleForLock ( EJSDeployedSupport methodContext , ContainerTx tx ) // d671368 { } }
// If the bean is enlisted in a global transaction , and was in a // method on a concurrent thread at the time this thread attempted // to call a method . . . then the current thread was unaware of the // global transaction and started a local transaction . For this // scenario , the local tran should be ignored when determining // if the bean is eligible to be locked . The local tran will be // ' completed ' and the global tran resumed after the lock has // been acquired . . . during ' enlist ' . if ( state == TX_METHOD_READY && currentTx != null && currentTx . isTransactionGlobal ( ) && ! tx . isTransactionGlobal ( ) ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "eligibleForLock : ignoring local tran : " + tx ) ; if ( super . eligibleForLock ( methodContext , currentTx ) ) // d671368.1 d704504 { // Attempt to transition thread from local to sticky global tran . // Lock the bean only if successful . d671368.1 return container . transitionToStickyGlobalTran ( beanId , tx , currentTx ) ; } return false ; // d671368.1 } // Not the sticky global tran scenario . . . so perform normal checking . return super . eligibleForLock ( methodContext , tx ) ; // d704504
public class LongToDoubleFunctionBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */ @ Nonnull public static LongToDoubleFunction longToDblFunctionFrom ( Consumer < LongToDoubleFunctionBuilder > buildingFunction ) { } }
LongToDoubleFunctionBuilder builder = new LongToDoubleFunctionBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ;
public class vpnvserver_vpnsessionpolicy_binding { /** * Use this API to fetch vpnvserver _ vpnsessionpolicy _ binding resources of given name . */ public static vpnvserver_vpnsessionpolicy_binding [ ] get ( nitro_service service , String name ) throws Exception { } }
vpnvserver_vpnsessionpolicy_binding obj = new vpnvserver_vpnsessionpolicy_binding ( ) ; obj . set_name ( name ) ; vpnvserver_vpnsessionpolicy_binding response [ ] = ( vpnvserver_vpnsessionpolicy_binding [ ] ) obj . get_resources ( service ) ; return response ;
public class Mappings { /** * Returns a mapping that maps objects to a supertype representation . * @ return the " upcast " mapping */ @ SuppressWarnings ( "unchecked" ) public static < S , T extends S > Mapping < T , S > upcast ( ) { } }
return ( Mapping < T , S > ) IDENTITY_MAPPING ;
public class OAuthUtils { /** * Generate a Principal from a webid claim . * @ param claims the JWT claims * @ return a Principal , if one can be generated from a webid claim */ public static Optional < Principal > withWebIdClaim ( final Claims claims ) { } }
final String webid = claims . get ( WEBID , String . class ) ; if ( webid == null ) return empty ( ) ; LOGGER . debug ( "Using JWT claim with webid: {}" , webid ) ; return of ( new OAuthPrincipal ( webid ) ) ;
public class Axis { /** * Remove a label from the axis . */ public Axis removeLabel ( String label ) { } }
if ( labels == null ) { throw new IllegalStateException ( ) ; } labels . remove ( label ) ; setSlice ( ) ; initGridLabels ( ) ; return this ;
public class LogRef { /** * * Log an info level message . * * * @ param msg * Log message * * @ param sr * The < code > ServiceReference < / code > of the service * that this * message is associated with . * * @ param e * The exception that reflects the condition . */ public void info ( String msg , ServiceReference sr , Throwable e ) { } }
doLog ( msg , LOG_INFO , sr , e ) ;
public class TTTLearnerVPDA { /** * Finalize a discriminator . Given a block root and a { @ link Splitter } , replace the discriminator at the block root * by the one derived from the splitter , and update the discrimination tree accordingly . * @ param blockRoot * the block root whose discriminator to finalize * @ param splitter * the splitter to use for finalization */ private void finalizeDiscriminator ( DTNode < I > blockRoot , Splitter < I > splitter ) { } }
assert blockRoot . isBlockRoot ( ) ; ContextPair < I > newDiscr = splitter . getNewDiscriminator ( ) ; if ( ! blockRoot . getDiscriminator ( ) . equals ( newDiscr ) ) { ContextPair < I > finalDiscriminator = prepareSplit ( blockRoot , splitter ) ; Map < Boolean , DTNode < I > > repChildren = new HashMap < > ( ) ; for ( Boolean label : blockRoot . getSplitData ( ) . getLabels ( ) ) { repChildren . put ( label , extractSubtree ( blockRoot , label ) ) ; } blockRoot . replaceChildren ( repChildren ) ; blockRoot . setDiscriminator ( finalDiscriminator ) ; } else { LOGGER . debug ( "Weird.." ) ; } declareFinal ( blockRoot ) ;
public class HadoopConfigurationInjector { /** * Loads an Azkaban property into the Hadoop configuration . * @ param props The Azkaban properties * @ param conf The Hadoop configuration * @ param name The property name to load from the Azkaban properties into the Hadoop configuration */ public static void loadProp ( Props props , Configuration conf , String name ) { } }
String prop = props . get ( name ) ; if ( prop != null ) { conf . set ( name , prop ) ; }
public class ParameterBuilder { /** * Adds all parameter from a map * @ param parameters map with parameters to add . Null values will be skipped . * @ return itself */ public ParameterBuilder addAll ( Map < String , Object > parameters ) { } }
if ( parameters != null ) { for ( String k : parameters . keySet ( ) ) { if ( parameters . get ( k ) != null ) { this . parameters . put ( k , parameters . get ( k ) ) ; } } } return this ;
public class ExecuteMojo { /** * Allow the script to work with every JAR dependency of both the project and plugin , including * optional and provided dependencies . Runtime classpath elements are loaded first , so that * legacy behavior is not modified . Additional elements are added first in the order of * project artifacts , then in the order of plugin artifacts . */ protected List getProjectClasspathElements ( ) throws DependencyResolutionRequiredException { } }
Set results = new LinkedHashSet ( ) ; Set includes = getClasspathIncludes ( ) ; if ( includes . contains ( CLASSPATH_INCLUDE_ALL ) || includes . contains ( CLASSPATH_INCLUDE_RUNTIME ) ) { for ( Iterator i = project . getRuntimeClasspathElements ( ) . iterator ( ) ; i . hasNext ( ) ; ) { String fileName = ( String ) i . next ( ) ; try { results . add ( new File ( fileName ) . getCanonicalPath ( ) ) ; } catch ( IOException e ) { throw new RuntimeException ( "Classpath element not found: " + fileName , e ) ; } } } if ( includes . contains ( CLASSPATH_INCLUDE_ALL ) || includes . contains ( CLASSPATH_INCLUDE_ARTIFACTS ) ) { for ( Iterator i = project . getArtifacts ( ) . iterator ( ) ; i . hasNext ( ) ; ) { Artifact artifact = ( Artifact ) i . next ( ) ; if ( artifact . getType ( ) . equals ( "jar" ) && artifact . getClassifier ( ) == null ) { try { results . add ( artifact . getFile ( ) . getCanonicalPath ( ) ) ; } catch ( IOException e ) { throw new RuntimeException ( "Maven artifact file not found: " + artifact , e ) ; } } } } if ( includes . contains ( CLASSPATH_INCLUDE_ALL ) || includes . contains ( CLASSPATH_INCLUDE_PLUGINS ) ) { for ( Iterator i = pluginArtifacts . iterator ( ) ; i . hasNext ( ) ; ) { Artifact artifact = ( Artifact ) i . next ( ) ; if ( artifact . getType ( ) . equals ( "jar" ) && artifact . getClassifier ( ) == null ) { try { results . add ( artifact . getFile ( ) . getCanonicalPath ( ) ) ; } catch ( IOException e ) { throw new RuntimeException ( "Maven plugin-artifact file not found: " + artifact , e ) ; } } } } return new ArrayList ( results ) ;
public class CommandParser { /** * Reads an argument of type " astring " from the request . */ public String astring ( ImapRequestLineReader request ) throws ProtocolException { } }
char next = request . nextWordChar ( ) ; switch ( next ) { case '"' : return consumeQuoted ( request ) ; case '{' : return consumeLiteral ( request ) ; default : return atom ( request ) ; }
public class DeployService2Impl { /** * Returns true if the entry is modified . */ public boolean isModified ( ) { } }
I instance = _instance ; if ( instance == null ) { return true ; } if ( DeployMode . MANUAL . equals ( _strategy . redeployMode ( ) ) ) { return false ; } return instance . isModified ( ) ;
public class QuickSortAlgorithm { /** * Sorts an array of float values and moves with the sort a second array . * @ param values the array to sort . * @ param valuesToFollow the array that should be sorted following the * indexes of the first array . Can be null . */ public void sort ( float [ ] values , float [ ] valuesToFollow ) { } }
this . valuesToSortFloat = values ; this . valuesToFollowFloat = valuesToFollow ; number = values . length ; monitor . beginTask ( "Sorting..." , - 1 ) ; monitor . worked ( 1 ) ; quicksortFloat ( 0 , number - 1 ) ; monitor . done ( ) ;
public class MOEADD { /** * find the subregion of the ' idx ' th solution in the population */ public int findRegion ( int idx ) { } }
for ( int i = 0 ; i < populationSize ; i ++ ) { if ( subregionIdx [ i ] [ idx ] == 1 ) { return i ; } } return - 1 ;
public class CmsListOpenResourceAction { /** * Returns the most possible right resource name . < p > * @ return the most possible right resource name */ protected String getResourceName ( ) { } }
String resource = getItem ( ) . get ( m_resColumnPathId ) . toString ( ) ; if ( ! getWp ( ) . getCms ( ) . existsResource ( resource , CmsResourceFilter . DEFAULT ) ) { String siteRoot = OpenCms . getSiteManager ( ) . getSiteRoot ( resource ) ; if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( siteRoot ) ) { resource = resource . substring ( siteRoot . length ( ) ) ; } if ( ! getWp ( ) . getCms ( ) . existsResource ( resource , CmsResourceFilter . DEFAULT ) ) { resource = null ; } } return resource ;
public class HldSeSelEntityProcNms { /** * < p > Get processor name for Entity with file save . < / p > * @ param pClass a Class * @ return a thing */ protected final String getForFSave ( final Class < ? > pClass ) { } }
if ( SeGoodsSpecifics . class == pClass || SeServiceSpecifics . class == pClass ) { return PrcEntityFSave . class . getSimpleName ( ) ; } return null ;
public class SwapLeg { /** * Returns the constant spread , , if the spread of this leg is constant . Otherwise an < code > UnsupportedOperationException < / code > is thrown . * @ return The constant spread . */ public double getSpread ( ) { } }
// Checking spread array for constant spreads double spread = spreads [ 0 ] ; for ( int i = 1 ; i < spreads . length ; i ++ ) { if ( spreads [ i ] != spread ) { throw new UnsupportedOperationException ( "The method getSpread() is only supported for swap legs with constant spreads." ) ; } } return spread ;
public class ServerManager { /** * Idem as start but throw exceptions . @ see ServerManager # start ( String [ ] , * String ) * @ param args * @ param execName * @ throws DevFailed */ public synchronized void startError ( final String [ ] args , final String execName ) throws DevFailed { } }
if ( isStarted . get ( ) ) { throw DevFailedUtils . newDevFailed ( "this server is already started" ) ; } init ( args , execName ) ;
public class QRDecomposition { /** * Least squares solution of A * X = B * @ param B A Matrix with as many rows as A and any number of columns . * @ return X that minimizes the two norm of Q * R * X - B . * @ throws IllegalArgumentException Matrix row dimensions must agree . * @ throws RuntimeException Matrix is rank deficient . */ public double [ ] [ ] solve ( double [ ] [ ] B ) { } }
if ( B . length != m ) { throw new IllegalArgumentException ( "Matrix row dimensions must agree." ) ; } if ( ! this . isFullRank ( ) ) { throw new RuntimeException ( "Matrix is rank deficient." ) ; } // Copy right hand side int nx = B [ 0 ] . length ; double [ ] [ ] X = B . clone ( ) ; // compute Y = transpose ( Q ) * B for ( int k = 0 ; k < n ; k ++ ) { for ( int j = 0 ; j < nx ; j ++ ) { double s = 0.0 ; for ( int i = k ; i < m ; i ++ ) { s += QR [ i ] [ k ] * X [ i ] [ j ] ; } s = - s / QR [ k ] [ k ] ; for ( int i = k ; i < m ; i ++ ) { X [ i ] [ j ] += s * QR [ i ] [ k ] ; } } } // Solve R * X = Y ; for ( int k = n - 1 ; k >= 0 ; k -- ) { for ( int j = 0 ; j < nx ; j ++ ) { X [ k ] [ j ] /= Rdiag [ k ] ; } for ( int i = 0 ; i < k ; i ++ ) { for ( int j = 0 ; j < nx ; j ++ ) { X [ i ] [ j ] -= X [ k ] [ j ] * QR [ i ] [ k ] ; } } } double [ ] [ ] mat = new double [ n ] [ nx ] ; for ( int i = 0 ; i < mat . length ; i ++ ) { for ( int j = 0 ; j < mat [ 0 ] . length ; j ++ ) { mat [ i ] [ j ] = X [ i ] [ j ] ; } } return mat ;
public class CmsContentCheckDialog { /** * Initializes the content check object or takes an exiting one which is stored in the sesstion . < p > */ protected void initContentCheck ( ) { } }
Object o ; if ( CmsStringUtil . isEmpty ( getParamAction ( ) ) || CmsDialog . DIALOG_INITIAL . equals ( getParamAction ( ) ) ) { // this is the initial dialog call o = null ; } else { // this is not the initial call , get module from session o = getDialogObject ( ) ; } if ( ! ( o instanceof CmsContentCheck ) ) { // create a new content check object m_contentCheck = new CmsContentCheck ( getCms ( ) ) ; } else { // reuse content check object stored in session m_contentCheck = ( CmsContentCheck ) o ; }
public class StringBuilderFutureAppendable { /** * ( non - Javadoc ) * @ see java . util . concurrent . Future # get ( long , java . util . concurrent . TimeUnit ) */ @ Override public CharSequence get ( long timeout , TimeUnit unit ) throws ExecutionException { } }
try { this . futureBuilder . performAppends ( ) ; } catch ( IOException | HttpErrorPage e ) { throw new ExecutionException ( e ) ; } return this . builder . toString ( ) ;
public class EulerSchemeFromProcessModel { /** * This method returns the realization of the process at a certain time index . * @ param timeIndex Time index at which the process should be observed * @ return A vector of process realizations ( on path ) */ @ Override public RandomVariable getProcessValue ( int timeIndex , int componentIndex ) { } }
// Thread safe lazy initialization synchronized ( this ) { if ( discreteProcess == null || discreteProcess . length == 0 ) { doPrecalculateProcess ( ) ; } } if ( discreteProcess [ timeIndex ] [ componentIndex ] == null ) { throw new NullPointerException ( "Generation of process component " + componentIndex + " at time index " + timeIndex + " failed. Likely due to out of memory" ) ; } // Return value of process return discreteProcess [ timeIndex ] [ componentIndex ] ;
public class CmsDriverManager { /** * Returns all direct users of the given organizational unit . < p > * @ param dbc the current db context * @ param orgUnit the organizational unit to get all users for * @ param recursive if all groups of sub - organizational units should be retrieved too * @ return all < code > { @ link CmsUser } < / code > objects in the organizational unit * @ throws CmsException if operation was not successful * @ see org . opencms . security . CmsOrgUnitManager # getResourcesForOrganizationalUnit ( CmsObject , String ) * @ see org . opencms . security . CmsOrgUnitManager # getUsers ( CmsObject , String , boolean ) */ public List < CmsUser > getUsers ( CmsDbContext dbc , CmsOrganizationalUnit orgUnit , boolean recursive ) throws CmsException { } }
return getUserDriver ( dbc ) . getUsers ( dbc , orgUnit , recursive ) ;
public class DefaultFunctionMapper { /** * ( non - Javadoc ) * @ see java . io . Externalizable # readExternal ( java . io . ObjectInput ) */ @ SuppressWarnings ( "unchecked" ) public void readExternal ( ObjectInput in ) throws IOException , ClassNotFoundException { } }
_functions = ( Map < String , Function > ) in . readObject ( ) ;
public class JaegerTracerFactory { /** * The Jaeger Tracer builder bean . * @ param configuration The configuration * @ return The builder */ @ Singleton @ Primary @ Requires ( classes = JaegerTracer . Builder . class ) JaegerTracer . Builder jaegerTracerBuilder ( Configuration configuration ) { } }
JaegerTracer . Builder tracerBuilder = resolveBuilder ( configuration ) ; if ( this . configuration . isExpandExceptionLogs ( ) ) { tracerBuilder . withExpandExceptionLogs ( ) ; } if ( this . configuration . isZipkinSharedRpcSpan ( ) ) { tracerBuilder . withZipkinSharedRpcSpan ( ) ; } if ( reporter != null ) { tracerBuilder . withReporter ( reporter ) ; } if ( sampler != null ) { tracerBuilder . withSampler ( sampler ) ; } return tracerBuilder ;
public class PartTreeConverter { /** * Convert a { @ link PartTree } into a where query alike to the one present in the * { @ link Query } ' s where property . */ public static String toIndexedQuery ( final PartTree tree ) { } }
final StringBuilder result = new StringBuilder ( ) ; final Iterator < OrPart > orIt = tree . iterator ( ) ; while ( orIt . hasNext ( ) ) { final OrPart orPart = orIt . next ( ) ; final Iterator < Part > partIt = orPart . iterator ( ) ; while ( partIt . hasNext ( ) ) { final Part part = partIt . next ( ) ; result . append ( " " + part . getProperty ( ) . getSegment ( ) + " " ) ; result . append ( convertOperator ( part . getType ( ) ) ) ; if ( partIt . hasNext ( ) ) { result . append ( " AND " ) ; } } if ( orIt . hasNext ( ) ) { result . append ( " OR " ) ; } } return StringUtil . removeExtraSpaces ( result . toString ( ) ) ;
public class SingleThreadRadixOrder { /** * Hot loop , pulled out from the main run code */ private static void runCopy ( final long start , final long len , final int keySize , final int batchSize , final long otmp [ ] [ ] , final byte xtmp [ ] [ ] , final long o [ ] [ ] , final byte x [ ] [ ] ) { } }
// now copy _ otmp and _ xtmp back over _ o and _ x from the start position , allowing for boundaries // _ o , _ x , _ otmp and _ xtmp all have the same _ batchsize // Would be really nice if Java had 64bit indexing to save programmer time . long numRowsToCopy = len ; int sourceBatch = 0 , sourceOffset = 0 ; int targetBatch = ( int ) ( start / batchSize ) , targetOffset = ( int ) ( start % batchSize ) ; int targetBatchRemaining = batchSize - targetOffset ; // ' remaining ' means of the the full batch , not of the numRowsToCopy int sourceBatchRemaining = batchSize - sourceOffset ; // at most batchSize remaining . No need to actually put the number of rows left in here while ( numRowsToCopy > 0 ) { // TO DO : put this into class as well , to ArrayCopy into batched final int thisCopy = ( int ) Math . min ( numRowsToCopy , Math . min ( sourceBatchRemaining , targetBatchRemaining ) ) ; System . arraycopy ( otmp [ sourceBatch ] , sourceOffset , o [ targetBatch ] , targetOffset , thisCopy ) ; System . arraycopy ( xtmp [ sourceBatch ] , sourceOffset * keySize , x [ targetBatch ] , targetOffset * keySize , thisCopy * keySize ) ; numRowsToCopy -= thisCopy ; sourceOffset += thisCopy ; sourceBatchRemaining -= thisCopy ; targetOffset += thisCopy ; targetBatchRemaining -= thisCopy ; if ( sourceBatchRemaining == 0 ) { sourceBatch ++ ; sourceOffset = 0 ; sourceBatchRemaining = batchSize ; } if ( targetBatchRemaining == 0 ) { targetBatch ++ ; targetOffset = 0 ; targetBatchRemaining = batchSize ; } // ' source ' and ' target ' deliberately the same length variable names and long lines deliberately used so we // can easy match them up vertically to ensure they are the same }
public class VersionListener { /** * ( non - Javadoc ) * @ see net . ossindex . version . parser . VersionBaseListener # exitUnion _ range ( net . ossindex . version . parser . VersionParser . Union _ rangeContext ) */ @ Override public void exitUnion_range ( VersionParser . Union_rangeContext ctx ) { } }
Object o1 = stack . pop ( ) ; Object o2 = stack . pop ( ) ; IVersionRange r1 = null ; IVersionRange r2 = null ; if ( o1 instanceof IVersion ) { r1 = new VersionSet ( ( IVersion ) o1 ) ; } else { r1 = ( IVersionRange ) o1 ; } if ( o2 instanceof IVersion ) { r2 = new VersionSet ( ( IVersion ) o2 ) ; } else { r2 = ( IVersionRange ) o2 ; } if ( r1 instanceof OrRange ) { stack . push ( ( ( OrRange ) r1 ) . add ( r2 ) ) ; } else if ( r2 instanceof OrRange ) { stack . push ( ( ( OrRange ) r2 ) . add ( r1 ) ) ; } else { stack . push ( new OrRange ( r2 , r1 ) ) ; }
public class EditText { /** * Returns the start padding of the view , plus space for the start * Drawable if any . */ @ TargetApi ( Build . VERSION_CODES . JELLY_BEAN_MR1 ) public int getCompoundPaddingStart ( ) { } }
if ( Build . VERSION . SDK_INT >= Build . VERSION_CODES . JELLY_BEAN_MR1 ) return mInputView . getCompoundPaddingStart ( ) ; return mInputView . getCompoundPaddingLeft ( ) ;
public class Expressions { /** * Create a new Path expression * @ param arrayType array type * @ param variable variable name * @ param < A > array type * @ param < E > element type * @ return path expression */ public static < A , E > ArrayPath < A , E > arrayPath ( Class < A > arrayType , String variable ) { } }
return new ArrayPath < A , E > ( arrayType , variable ) ;
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getIfcValue ( ) { } }
if ( ifcValueEClass == null ) { ifcValueEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 980 ) ; } return ifcValueEClass ;
public class CloudTasksClient { /** * Acknowledges a pull task . * < p > The worker , that is , the entity that * [ leased ] [ google . cloud . tasks . v2beta2 . CloudTasks . LeaseTasks ] this task must call this method to * indicate that the work associated with the task has finished . * < p > The worker must acknowledge a task within the * [ lease _ duration ] [ google . cloud . tasks . v2beta2 . LeaseTasksRequest . lease _ duration ] or the lease will * expire and the task will become available to be leased again . After the task is acknowledged , * it will not be returned by a later * [ LeaseTasks ] [ google . cloud . tasks . v2beta2 . CloudTasks . LeaseTasks ] , * [ GetTask ] [ google . cloud . tasks . v2beta2 . CloudTasks . GetTask ] , or * [ ListTasks ] [ google . cloud . tasks . v2beta2 . CloudTasks . ListTasks ] . * < p > Sample code : * < pre > < code > * try ( CloudTasksClient cloudTasksClient = CloudTasksClient . create ( ) ) { * TaskName name = TaskName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ QUEUE ] " , " [ TASK ] " ) ; * Timestamp scheduleTime = Timestamp . newBuilder ( ) . build ( ) ; * cloudTasksClient . acknowledgeTask ( name , scheduleTime ) ; * < / code > < / pre > * @ param name Required . * < p > The task name . For example : * ` projects / PROJECT _ ID / locations / LOCATION _ ID / queues / QUEUE _ ID / tasks / TASK _ ID ` * @ param scheduleTime Required . * < p > The task ' s current schedule time , available in the * [ schedule _ time ] [ google . cloud . tasks . v2beta2 . Task . schedule _ time ] returned by * [ LeaseTasks ] [ google . cloud . tasks . v2beta2 . CloudTasks . LeaseTasks ] response or * [ RenewLease ] [ google . cloud . tasks . v2beta2 . CloudTasks . RenewLease ] response . This restriction * is to ensure that your worker currently holds the lease . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final void acknowledgeTask ( TaskName name , Timestamp scheduleTime ) { } }
AcknowledgeTaskRequest request = AcknowledgeTaskRequest . newBuilder ( ) . setName ( name == null ? null : name . toString ( ) ) . setScheduleTime ( scheduleTime ) . build ( ) ; acknowledgeTask ( request ) ;
public class SSTableIndexIndex { /** * Create and write an index index based on the input Cassandra Index . db file . Read the Index . db and generate chunks * ( splits ) based on the configured chunk size . * @ param fileSystem Hadoop file system . * @ param sstablePath SSTable Index . db . * @ throws IOException */ public static void writeIndex ( final FileSystem fileSystem , final Path sstablePath ) throws IOException { } }
final Configuration configuration = fileSystem . getConf ( ) ; final long splitSize = configuration . getLong ( HadoopSSTableConstants . HADOOP_SSTABLE_SPLIT_MB , HadoopSSTableConstants . DEFAULT_SPLIT_MB ) * 1024 * 1024 ; final Closer closer = Closer . create ( ) ; final Path outputPath = sstablePath . suffix ( SSTABLE_INDEX_SUFFIX ) ; final Path inProgressOutputPath = sstablePath . suffix ( SSTABLE_INDEX_IN_PROGRESS_SUFFIX ) ; boolean success = false ; try { final FSDataOutputStream os = closer . register ( fileSystem . create ( inProgressOutputPath ) ) ; final TLongArrayList splitOffsets = new TLongArrayList ( ) ; long currentStart = 0 ; long currentEnd = 0 ; final IndexOffsetScanner index = closer . register ( new IndexOffsetScanner ( sstablePath , fileSystem ) ) ; while ( index . hasNext ( ) ) { // NOTE : This does not give an exact size of this split in bytes but a rough estimate . // This should be good enough since it ' s only used for sorting splits by size in hadoop land . while ( currentEnd - currentStart < splitSize && index . hasNext ( ) ) { currentEnd = index . next ( ) ; splitOffsets . add ( currentEnd ) ; } // Record the split final long [ ] offsets = splitOffsets . toArray ( ) ; os . writeLong ( offsets [ 0 ] ) ; // Start os . writeLong ( offsets [ offsets . length - 1 ] ) ; // End // Clear the offsets splitOffsets . clear ( ) ; if ( index . hasNext ( ) ) { currentStart = index . next ( ) ; currentEnd = currentStart ; splitOffsets . add ( currentStart ) ; } } success = true ; } finally { closer . close ( ) ; if ( ! success ) { fileSystem . delete ( inProgressOutputPath , false ) ; } else { fileSystem . rename ( inProgressOutputPath , outputPath ) ; } }
public class XPathHelper { /** * Returns the XPaths of all nodes retrieved by xpathExpression . Example : / / DIV [ @ id = ' foo ' ] * returns / HTM [ 1 ] / BODY [ 1 ] / DIV [ 2] * @ param dom The dom . * @ param xpathExpression The expression to find the element . * @ return list of XPaths retrieved by xpathExpression . * @ throws XPathExpressionException */ public static ImmutableList < String > getXpathForXPathExpressions ( Document dom , String xpathExpression ) throws XPathExpressionException { } }
NodeList nodeList = XPathHelper . evaluateXpathExpression ( dom , xpathExpression ) ; Builder < String > result = ImmutableList . builder ( ) ; if ( nodeList . getLength ( ) > 0 ) { for ( int i = 0 ; i < nodeList . getLength ( ) ; i ++ ) { Node n = nodeList . item ( i ) ; result . add ( getXPathExpression ( n ) ) ; } } return result . build ( ) ;
public class LdaptiveAuthenticatorBuilder { /** * From the LdapAuthenticationConfiguration class : */ public static Authenticator getAuthenticator ( final LdapAuthenticationProperties l ) { } }
if ( l . getType ( ) == LdapAuthenticationProperties . AuthenticationTypes . AD ) { LOGGER . debug ( "Creating active directory authenticator for {}" , l . getLdapUrl ( ) ) ; return getActiveDirectoryAuthenticator ( l ) ; } if ( l . getType ( ) == LdapAuthenticationProperties . AuthenticationTypes . DIRECT ) { LOGGER . debug ( "Creating direct-bind authenticator for {}" , l . getLdapUrl ( ) ) ; return getDirectBindAuthenticator ( l ) ; } if ( l . getType ( ) == LdapAuthenticationProperties . AuthenticationTypes . SASL ) { LOGGER . debug ( "Creating SASL authenticator for {}" , l . getLdapUrl ( ) ) ; return getSaslAuthenticator ( l ) ; } if ( l . getType ( ) == LdapAuthenticationProperties . AuthenticationTypes . AUTHENTICATED ) { LOGGER . debug ( "Creating authenticated authenticator for {}" , l . getLdapUrl ( ) ) ; return getAuthenticatedOrAnonSearchAuthenticator ( l ) ; } LOGGER . debug ( "Creating anonymous authenticator for {}" , l . getLdapUrl ( ) ) ; return getAuthenticatedOrAnonSearchAuthenticator ( l ) ;
public class MP3Header { /** * Calculate the size of a MP3 frame for this header . * @ return size of the frame including the header */ public int frameSize ( ) { } }
switch ( layerDescription ) { case 3 : // Layer 1 return ( 12 * getBitRate ( ) / getSampleRate ( ) + ( paddingBit ? 1 : 0 ) ) * 4 ; case 2 : case 1 : // Layer 2 and 3 if ( audioVersionId == 3 ) { // MPEG 1 return 144 * getBitRate ( ) / getSampleRate ( ) + ( paddingBit ? 1 : 0 ) ; } else { // MPEG 2 or 2.5 return 72 * getBitRate ( ) / getSampleRate ( ) + ( paddingBit ? 1 : 0 ) ; } default : // Unknown return - 1 ; }
public class GreenPepperXmlRpcClient { /** * { @ inheritDoc } */ @ SuppressWarnings ( "unchecked" ) public Set < Project > getAllProjects ( String identifier ) throws GreenPepperServerException { } }
log . debug ( "Retrieving All Projects" ) ; Vector < Object > projectsParams = ( Vector < Object > ) execute ( XmlRpcMethodName . getAllProjects , identifier ) ; return XmlRpcDataMarshaller . toProjectList ( projectsParams ) ;
public class ExpressionUtils { /** * Create a distinct list of the given args * @ param args elements * @ return list with distinct elements */ public static ImmutableList < Expression < ? > > distinctList ( Expression < ? > ... args ) { } }
final ImmutableList . Builder < Expression < ? > > builder = ImmutableList . builder ( ) ; final Set < Expression < ? > > set = new HashSet < Expression < ? > > ( args . length ) ; for ( Expression < ? > arg : args ) { if ( set . add ( arg ) ) { builder . add ( arg ) ; } } return builder . build ( ) ;
public class NetworkClient { /** * Deletes the specified network . * < p > Sample code : * < pre > < code > * try ( NetworkClient networkClient = NetworkClient . create ( ) ) { * ProjectGlobalNetworkName network = ProjectGlobalNetworkName . of ( " [ PROJECT ] " , " [ NETWORK ] " ) ; * Operation response = networkClient . deleteNetwork ( network . toString ( ) ) ; * < / code > < / pre > * @ param network Name of the network to delete . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Operation deleteNetwork ( String network ) { } }
DeleteNetworkHttpRequest request = DeleteNetworkHttpRequest . newBuilder ( ) . setNetwork ( network ) . build ( ) ; return deleteNetwork ( request ) ;
public class ALPNHackClientHelloExplorer { /** * struct { * uint8 major ; * uint8 minor ; * } ProtocolVersion ; * enum { * change _ cipher _ spec ( 20 ) , alert ( 21 ) , handshake ( 22 ) , * application _ data ( 23 ) , ( 255) * } ContentType ; * struct { * ContentType type ; * ProtocolVersion version ; * uint16 length ; * opaque fragment [ TLSPlaintext . length ] ; * } TLSPlaintext ; */ private static void exploreTLSRecord ( ByteBuffer input , byte firstByte , byte secondByte , byte thirdByte , List < String > alpnProtocols , ByteArrayOutputStream out ) throws SSLException { } }
// Is it a handshake message ? if ( firstByte != 22 ) { // 22 : handshake record throw UndertowMessages . MESSAGES . notHandshakeRecord ( ) ; } // Is there enough data for a full record ? int recordLength = getInt16 ( input ) ; if ( recordLength > input . remaining ( ) ) { throw new BufferUnderflowException ( ) ; } if ( out != null ) { out . write ( 0 ) ; out . write ( 0 ) ; } // We have already had enough source bytes . try { exploreHandshake ( input , secondByte , thirdByte , recordLength , alpnProtocols , out ) ; } catch ( BufferUnderflowException ignored ) { throw UndertowMessages . MESSAGES . invalidHandshakeRecord ( ) ; }
public class Cache { /** * Tells the cache to store under the given resource name the contents * that will be written to the output stream ; the method creates a new * resource entry and opens an output stream to it , then returns the * stream to the caller so this can copy its data into it . It is up to * the caller to close the steam once all data have been written to it . * This mechanism actually by - passes the cache and the miss handlers and * provides direct access to the underlying storage engine , thus providing * a highly efficient way of storing data into the cache . * @ param resource * the name of the new resource , to which the returned output stream * will point ; it must be a valid , non empty string . * @ return * an output stream ; the caller will write its data into it , and then * will flush and close it once it ' s done writing data . * @ throws CacheException */ public OutputStream put ( String resource ) throws CacheException { } }
if ( Strings . isValid ( resource ) ) { return storage . store ( resource ) ; } return null ;
public class StringFixture { /** * Determines integer value of String ( so relative checks can be done ) . * @ param value string to convert to integer . * @ return integer value . */ public Integer convertToInt ( String value ) { } }
Integer result = null ; if ( value != null ) { result = Integer . valueOf ( value ) ; } return result ;
public class LocalTaskQueue { /** * / * ( non - Javadoc ) * @ see org . duracloud . queue . TaskQueue # take ( ) */ @ Override public synchronized Task take ( ) throws TimeoutException { } }
try { Task task = queue . remove ( ) ; inprocess . add ( task ) ; return task ; } catch ( NoSuchElementException ex ) { throw new TimeoutException ( ex ) ; }
public class EnumParameterTypeImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public boolean eIsSet ( int featureID ) { } }
switch ( featureID ) { case BpsimPackage . ENUM_PARAMETER_TYPE__GROUP : return group != null && ! group . isEmpty ( ) ; case BpsimPackage . ENUM_PARAMETER_TYPE__PARAMETER_VALUE_GROUP : return ! getParameterValueGroup ( ) . isEmpty ( ) ; case BpsimPackage . ENUM_PARAMETER_TYPE__PARAMETER_VALUE : return ! getParameterValue ( ) . isEmpty ( ) ; } return super . eIsSet ( featureID ) ;
public class ChronoFormatter { /** * / * [ deutsch ] * < p > Konstruiert einen Formatierer f & uuml ; r Uhrzeitobjekte . < / p > * @ param pattern format pattern * @ param type the type of the pattern to be used * @ param locale format locale * @ return new { @ code ChronoFormatter } - instance * @ throws IllegalArgumentException if resolving of pattern fails * @ see # ofPattern ( String , PatternType , Locale , Chronology ) * @ since 3.1 */ public static ChronoFormatter < PlainTime > ofTimePattern ( String pattern , PatternType type , Locale locale ) { } }
Builder < PlainTime > builder = new Builder < > ( PlainTime . axis ( ) , locale ) ; addPattern ( builder , pattern , type ) ; try { return builder . build ( ) ; } catch ( IllegalStateException ise ) { throw new IllegalArgumentException ( ise ) ; }
public class SqlEntityQueryImpl { /** * { @ inheritDoc } * @ see jp . co . future . uroborosql . fluent . SqlEntityQuery # exists ( java . lang . Runnable ) */ @ Override public void exists ( final Runnable runnable ) { } }
StringBuilder sql = new StringBuilder ( "select 1 from (" ) . append ( System . lineSeparator ( ) ) . append ( aggregationSourceSql ( ) ) . append ( System . lineSeparator ( ) ) . append ( ") t_" ) ; context ( ) . setSql ( sql . toString ( ) ) ; try ( ResultSet rs = agent ( ) . query ( context ( ) ) ) { if ( rs . next ( ) ) { runnable . run ( ) ; } } catch ( final SQLException e ) { throw new EntitySqlRuntimeException ( SqlKind . SELECT , e ) ; }
public class PtoPOutputHandler { /** * Reallocates messages from the sourcestream to alternative localisations * Called when TRM informs us that this localistaion has become unreachable . * Called when TRM informs us that this localisation has become reachable . * Called when TRM informs us that another localisation for the same destination has become reachable . * Called when an Admin action has forced a flush of the streams * Warning : Callers MUST NOT hold any locks as the ReallocationLock is at the top of * the lock hierarchy . * @ param destination The destination handler associated with this outputhandler * @ param txManager Under which to create a transaction for our work */ public void reallocateMsgs ( DestinationHandler destination , boolean allMsgs , boolean forceRemove ) throws SIResourceException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "reallocateMsgs" , new Object [ ] { this . toString ( ) , destination , new Boolean ( allMsgs ) , new Boolean ( forceRemove ) } ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "Reallocating messages queued for " + destination . getName ( ) + " from ME " + messageProcessor . getMessagingEngineUuid ( ) + " to ME " + targetMEUuid ) ; // Attempt to reallocate the messages to another localisation of the destination , // or to the exception destination , or discard the messages . ExceptionDestinationHandlerImpl exceptionDestinationHandlerImpl = null ; LocalTransaction transaction = null ; int transactionSize = - 1 ; // PK57432 We are able to send messages , unless we encounter a guess on any stream // ( a guess on one stream will mean all other streams would also get guesses ) . boolean streamCanSendMsgs = true ; // Obtain and release an exclusive lock on the destination to ensure a // send is not taking place LockManager reallocationLock = ( ( BaseDestinationHandler ) destination ) . getReallocationLockManager ( ) ; reallocationLock . lockExclusive ( ) ; // Release Lock reallocationLock . unlockExclusive ( ) ; synchronized ( this ) { StreamSet streamSet = sourceStreamManager . getStreamSet ( ) ; if ( streamSet == null ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "reallocateMsgs" ) ; return ; } Iterator stream_it = streamSet . iterator ( ) ; // Iterate over all streams in the stream set while ( stream_it . hasNext ( ) ) { transactionSize = 0 ; transaction = destination . getTxManager ( ) . createLocalTransaction ( false ) ; SourceStream stream = ( SourceStream ) stream_it . next ( ) ; // We need to leave the stream in the same guess state as it started // in , otherwise , if we incorrectly change it from ' guesses ' to // ' no guesses ' any other thread currently halfway through processing // a message may incorrectly think that it can send a message - this // leaves the message locked to this OutputHandler and cannot be // reallocated later . boolean streamStateChecked = false ; boolean oldGuessState = stream . containsGuesses ( ) ; // prevent the stream from sending any more messages while // we are reallocating . This avoids the need to lock the // stream stream . guessesInStream ( ) ; // Obtain a list of all messages which are in the stream // but have not yet been sent , or all the messages in the // stream if we have been asked to reallocate everything List indexList = null ; if ( allMsgs == true ) indexList = stream . getAllMessagesOnStream ( ) ; else indexList = stream . getMessagesAfterSendWindow ( ) ; ArrayList < SIMPMessage > markedForSilence = new ArrayList < SIMPMessage > ( ) ; Iterator indexList_it = indexList . iterator ( ) ; // Populate msgList with messages indexed in indexList long index = - 1 ; while ( indexList_it . hasNext ( ) ) { index = ( ( Long ) indexList_it . next ( ) ) . longValue ( ) ; MessageItem msg = null ; try { msg = ( MessageItem ) transmissionItemStream . findById ( index ) ; } catch ( MessageStoreException e ) { // FFDC FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.PtoPOutputHandler.reallocateMsgs" , "1:2168:1.241" , this ) ; SibTr . exception ( tc , e ) ; try { // Attempt to rollback any outstanding tran transaction . rollback ( ) ; } catch ( SIException ee ) { // No FFDC code needed SibTr . exception ( tc , ee ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "reallocateMsgs" , e ) ; return ; } // Check for null ( message expired ) if ( msg != null ) { streamStateChecked = true ; // It ' s possible that the message was sent to an alias that mapped // to this destination , in which case we need to take into account any // scoped ME sets DestinationHandler scopingDestination = destination ; // For each message we need to reallocate OutputHandler outputHandler = null ; streamCanSendMsgs = false ; // assume that the existing stream cannot boolean removeMsg = true ; // we assume that we will remove the message from // the item stream and write silence into the src stream boolean sendToInputHandler = false ; boolean newStreamIsGuess = false ; if ( ! destination . isToBeDeleted ( ) ) { // If this message has a routingDestination set in it it may be // because the message was fixed on a specific ME or set of MEs . // Pull out the ME to use in the reallocation decision . SIBUuid8 fixedME = null ; JsDestinationAddress routingDestinationAddr = msg . getMessage ( ) . getRoutingDestination ( ) ; if ( routingDestinationAddr != null ) { // Pull out any fixed ME fixedME = routingDestinationAddr . getME ( ) ; // If we ' re not fixed then check to see if we were sent to an alias , and if // so use that for routing from now on . if ( fixedME == null ) { DestinationHandler routingDestHandler ; try { routingDestHandler = messageProcessor . getDestinationManager ( ) . getDestination ( routingDestinationAddr , false ) ; if ( routingDestHandler . isAlias ( ) ) { scopingDestination = routingDestHandler ; } } catch ( SIException e ) { // No FFDC code needed if ( TraceComponent . isAnyTracingEnabled ( ) ) SibTr . exception ( tc , e ) ; // If we can ' t find the alias destination then we ' ve missed our chance // to scope the message . This is acceptable - there is no guarantee on // message scoping if the config changes after sending the message so // ignore this and use the original destination . } } } // Obtain an output handler for this destination outputHandler = scopingDestination . choosePtoPOutputHandler ( fixedME , transmissionItemStream . getLocalizingMEUuid ( ) , // preferred ME ( ! msg . isFromRemoteME ( ) ) , false , null ) ; if ( outputHandler != null ) { newStreamIsGuess = outputHandler . isWLMGuess ( ) ; // determine if this is a new output handler or not if ( outputHandler == this ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "Leaving message " + msg . getMessage ( ) . getSystemMessageId ( ) + " on this stream (" + newStreamIsGuess + ")" ) ; // PK57432 WLM may have guessed to come to this conclusion , // because all candidates are unavailable . // We should not attempt to transmit the messages in this case . streamCanSendMsgs = ! newStreamIsGuess ; removeMsg = false ; // msg is in the correct place so we // do not need to remove it // If WLM has re - chosen this OutputHandler it means either : // a ) The target is now available // b ) The target is unavailable but there are no other available targets // ( otherwise it would have chosen one of those instead ) // In either case WLM with always return the same choice for every message // on this stream so there is no benefit in continuing to iterate through // the msgs as they will all stay on this stream . if ( fixedME == null && scopingDestination . equals ( destination ) ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "Exiting reallocate due to no better options" ) ; // If WLM guessed the same handler without any restriction being // forced onto it but we ' re not able to re - send the messages // then exit the msg loop as there ' s no point reallocating more // messages on this stream after a guess . break ; } } else { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "Reallocating message " + msg . getMessage ( ) . getSystemMessageId ( ) + " to alternative ME" ) ; // WLM chose an alternative outputhandler so get the inputhandler // and resend the message to it ( allowing for the preferredME ) sendToInputHandler = true ; } } // end if outputHandler ! = null else { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "Leaving message " + msg . getMessage ( ) . getSystemMessageId ( ) + " on this stream due to no OutputHandler!" ) ; removeMsg = false ; // No output handler but that doesnt mean we remove the msg } } else { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "Discarding message " + msg . getMessage ( ) . getSystemMessageId ( ) + " due to deleted queue" ) ; if ( ! messageProcessor . discardMsgsAfterQueueDeletion ( ) ) { /* * The destination is being removed so messages should be moved to the exception destination */ if ( exceptionDestinationHandlerImpl == null ) { // Create an exception destination handler exceptionDestinationHandlerImpl = ( ExceptionDestinationHandlerImpl ) messageProcessor . createExceptionDestinationHandler ( null ) ; } String destName = destinationHandler . getName ( ) ; if ( destinationHandler . isLink ( ) ) destName = ( ( LinkHandler ) destinationHandler ) . getBusName ( ) ; final UndeliverableReturnCode rc = exceptionDestinationHandlerImpl . handleUndeliverableMessage ( msg , transaction , SIRCConstants . SIRC0032_DESTINATION_DELETED_ERROR , new String [ ] { destName , messageProcessor . getMessagingEngineName ( ) } ) ; if ( rc == UndeliverableReturnCode . ERROR || rc == UndeliverableReturnCode . BLOCK ) { // Messages could not be moved so dont bother trying the rest . Just flag an error . SIErrorException e = new SIErrorException ( nls . getFormattedMessage ( "DESTINATION_DELETED_ERROR_CWSIP0550" , new Object [ ] { destinationHandler . getName ( ) , destinationHandler . getUuid ( ) . toString ( ) } , null ) ) ; FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.PtoPOutputHandler.reallocateMsgs" , "1:2372:1.241" , this ) ; SibTr . exception ( tc , e ) ; removeMsg = false ; break ; } transactionSize ++ ; } // else remove will be true } if ( sendToInputHandler ) { // send this message to a new input handler InputHandler inputHandler = scopingDestination . getInputHandler ( ProtocolType . UNICASTINPUT , null , msg . getMessage ( ) ) ; try { JsMessage message = msg . getMessage ( ) . getReceived ( ) ; MessageItem messageItem = new MessageItem ( message ) ; // Copy across the original message ' s prefer local setting messageItem . setPreferLocal ( msg . preferLocal ( ) ) ; inputHandler . handleMessage ( messageItem , transaction , messageProcessor . getMessagingEngineUuid ( ) ) ; transactionSize ++ ; } catch ( SIException e ) { // No FFDC code needed handleReallocationFailure ( e , destination , transaction ) ; } catch ( MessageCopyFailedException e ) { // No FFDC code needed handleReallocationFailure ( e , destination , transaction ) ; } } // Remove message from itemstream if ( removeMsg || forceRemove ) { Transaction msTran = messageProcessor . resolveAndEnlistMsgStoreTransaction ( transaction ) ; try { // remove it from this itemstream msg . remove ( msTran , msg . getLockID ( ) ) ; transactionSize ++ ; markedForSilence . add ( msg ) ; } catch ( MessageStoreException e ) { // No FFDC code needed handleReallocationFailure ( e , destination , transaction ) ; } } // Batch up the transactional operations . if ( transactionSize > SIMPConstants . REALLOCATION_BATCH_SIZE ) { // Commit the transaction and start another one try { transaction . commit ( ) ; transaction = destination . getTxManager ( ) . createLocalTransaction ( false ) ; transactionSize = 0 ; // Turn markedForSilence to silence Iterator < SIMPMessage > markedForSilence_it = markedForSilence . iterator ( ) ; while ( markedForSilence_it . hasNext ( ) ) { SIMPMessage silenceMsg = markedForSilence_it . next ( ) ; stream . writeSilenceForced ( silenceMsg ) ; } // reset markedForSilence markedForSilence = new ArrayList < SIMPMessage > ( ) ; } catch ( SIException e ) { // No FFDC code needed handleReallocationFailure ( e , destination , transaction ) ; } } } } // Commit transaction try { transaction . commit ( ) ; // Turn markedForSilence to silence Iterator < SIMPMessage > markedForSilence_it = markedForSilence . iterator ( ) ; while ( markedForSilence_it . hasNext ( ) ) { SIMPMessage silenceMsg = markedForSilence_it . next ( ) ; stream . writeSilenceForced ( silenceMsg ) ; } } catch ( SIException e ) { // No FFDC code needed handleReallocationFailure ( e , destination , transaction ) ; } // If there weren ' t any messages driven to re - calculate the guess state of the stream , // make sure the stream is set back to how it was before we started . if ( ! streamStateChecked ) streamCanSendMsgs = ! oldGuessState ; // Drive the transmit of the remaining messages on the stream if ( streamCanSendMsgs ) stream . noGuessesInStream ( ) ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "reallocateMsgs" ) ;