signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class GroupService { /** * Power off groups of servers
* @ param groupFilter search servers criteria by group filter
* @ return OperationFuture wrapper for Server list */
public OperationFuture < List < Server > > powerOff ( GroupFilter groupFilter ) { } }
|
return serverService ( ) . powerOff ( getServerSearchCriteria ( groupFilter ) ) ;
|
public class ADischargeDistributor { /** * Calculates the current subsuperficial discharge .
* The discharge takes into account the distribution
* of all the distributed discharge contributions
* in the prior timesteps .
* @ param subSuperficialDischarge the non distributed discharge value .
* @ param saturatedAreaPercentage the percentage of saturated area .
* @ param timeInMillis the current timestep .
* @ return the calculated discharge . */
public double calculateSubsuperficialDischarge ( double subSuperficialDischarge , double saturatedAreaPercentage , long timeInMillis ) { } }
|
distributeIncomingSubSuperficialDischarge ( subSuperficialDischarge , saturatedAreaPercentage , timeInMillis ) ; return subSuperficialDischargeArray [ indexFromTimeInMillis ( timeInMillis ) ] ;
|
public class CollectionUtilities { /** * Splits the stringArray by the delimiter into an array of strings ignoring the escaped delimiters
* @ param stringArray the string to be split
* @ param delimiter the delimiter by which to split the stringArray
* @ return an array of Strings */
@ NotNull public static String [ ] toArray ( @ Nullable final String stringArray , @ NotNull final String delimiter ) { } }
|
if ( StringUtils . isEmpty ( stringArray ) ) { return new String [ 0 ] ; } final String regex = "(?<!\\\\)" + Pattern . quote ( delimiter ) ; return stringArray . split ( regex ) ;
|
public class Tools { /** * Convert something to a double in a fast way having a good guess
* that it is a double . This is perfect for MongoDB data that * should *
* have been stored as doubles already so there is a high probability
* of easy converting .
* @ param x The object to convert to a double
* @ return Converted object , 0 if empty or something went wrong . */
public static Double getDouble ( Object x ) { } }
|
if ( x == null ) { return null ; } if ( x instanceof Double ) { return ( Double ) x ; } if ( x instanceof String ) { String s = x . toString ( ) ; if ( s == null || s . isEmpty ( ) ) { return null ; } } /* * This is the last and probably expensive fallback . This should be avoided by
* only passing in Doubles , Integers , Longs or stuff that can be parsed from it ' s String
* representation . You might have to build cached objects that did a safe conversion
* once for example . There is no way around for the actual values we compare if the
* user sent them in as non - numerical type . */
return Doubles . tryParse ( x . toString ( ) ) ;
|
public class IssueDescriptionReaderV1 { /** * Extracts the list of bug occurrences from the description .
* @ param pDescription
* the issue description
* @ param pStacktraceMD5
* the stacktrace MD5 hash the issue is related to
* @ return the ACRA bug occurrences listed in the description
* @ throws IssueParseException
* malformed issue description */
private Map < String , Date > parseAcraOccurrencesTable ( final String pDescription , final String pStacktraceMD5 ) throws IssueParseException { } }
|
final Map < String , Date > occur = new HashMap < String , Date > ( ) ; // escape braces { and } to use strings in regexp
final String header = IssueDescriptionUtilsV1 . getOccurrencesTableHeader ( ) ; final String escHeader = Pattern . quote ( header ) ; // regexp to find occurrences tables
final Pattern p = Pattern . compile ( escHeader + IssueDescriptionUtilsV1 . EOL + "(?:" + OCCURR_LINE_PATTERN + IssueDescriptionUtilsV1 . EOL + "+)+" , Pattern . DOTALL | Pattern . CASE_INSENSITIVE ) ; final Matcher m = p . matcher ( pDescription ) ; if ( m . find ( ) ) { // regexp to find occurrences lines
final Pattern pLine = Pattern . compile ( OCCURR_LINE_PATTERN ) ; final Matcher mLine = pLine . matcher ( m . group ( ) ) ; while ( mLine . find ( ) ) { final StringTokenizer line = new StringTokenizer ( mLine . group ( ) , "|" ) ; final String acraReportId = line . nextToken ( ) ; final String acraUserCrashDate = line . nextToken ( ) ; try { occur . put ( acraReportId , IssueDescriptionUtilsV1 . parseDate ( acraUserCrashDate ) ) ; } catch ( final ParseException e ) { throw new IssueParseException ( "Unable to parse user crash date of ACRA report " + acraReportId , e ) ; } } } else { throw new IssueParseException ( "No crash occurrence table found in the description" ) ; } if ( m . find ( ) ) { throw new IssueParseException ( "More than 1 occurrence table found in the description" ) ; } if ( MapUtils . isEmpty ( occur ) ) { throw new IssueParseException ( "0 user crash occurrence found in the description" ) ; } return occur ;
|
public class SofaRpcSerialization { /** * 客户端记录序列化请求的耗时和
* @ param requestCommand 请求对象 */
protected void recordSerializeRequest ( RequestCommand requestCommand , InvokeContext invokeContext ) { } }
|
if ( ! RpcInternalContext . isAttachmentEnable ( ) ) { return ; } RpcInternalContext context = null ; if ( invokeContext != null ) { // 客户端异步调用的情况下 , 上下文会放在InvokeContext中传递
context = invokeContext . get ( RemotingConstants . INVOKE_CTX_RPC_CTX ) ; } if ( context == null ) { context = RpcInternalContext . getContext ( ) ; } int cost = context . getStopWatch ( ) . tick ( ) . read ( ) ; int requestSize = RpcProtocol . getRequestHeaderLength ( ) + requestCommand . getClazzLength ( ) + requestCommand . getContentLength ( ) + requestCommand . getHeaderLength ( ) ; // 记录请求序列化大小和请求序列化耗时
context . setAttachment ( RpcConstants . INTERNAL_KEY_REQ_SIZE , requestSize ) ; context . setAttachment ( RpcConstants . INTERNAL_KEY_REQ_SERIALIZE_TIME , cost ) ;
|
public class RebalanceUtils { /** * Interim and final clusters ought to have same partition counts , same
* zones , and same node state . Partitions per node may of course differ .
* @ param interimCluster
* @ param finalCluster */
public static void validateInterimFinalCluster ( final Cluster interimCluster , final Cluster finalCluster ) { } }
|
validateClusterPartitionCounts ( interimCluster , finalCluster ) ; validateClusterZonesSame ( interimCluster , finalCluster ) ; validateClusterNodeCounts ( interimCluster , finalCluster ) ; validateClusterNodeState ( interimCluster , finalCluster ) ; return ;
|
public class CompressionUtils { /** * gunzip the file to the output file .
* @ param pulledFile The source of the gz data
* @ param outFile A target file to put the contents
* @ return The result of the file copy
* @ throws IOException */
public static FileUtils . FileCopyResult gunzip ( final File pulledFile , File outFile ) { } }
|
return gunzip ( Files . asByteSource ( pulledFile ) , outFile ) ;
|
public class JobCoordinator { /** * Sets the job as failed with given error message .
* @ param errorMessage Error message to set for failure */
public synchronized void setJobAsFailed ( String errorMessage ) { } }
|
mJobInfo . setStatus ( Status . FAILED ) ; mJobInfo . setErrorMessage ( errorMessage ) ;
|
public class ZipNumCluster { /** * Adjust from shorter blocks , if loaded */
public long computeTotalLines ( ) { } }
|
long numLines = 0 ; try { numLines = this . getNumLines ( summary . getRange ( "" , "" ) ) ; } catch ( IOException e ) { LOGGER . warning ( e . toString ( ) ) ; return 0 ; } long adjustment = getTotalAdjustment ( ) ; numLines -= ( getNumBlocks ( ) - 1 ) ; numLines *= this . getCdxLinesPerBlock ( ) ; numLines += adjustment ; return numLines ;
|
public class InputFile { /** * Use this setter to send new file .
* @ param mediaFile File to send
* @ param fileName Name of the file
* @ return This object */
public InputFile setMedia ( File mediaFile , String fileName ) { } }
|
this . newMediaFile = mediaFile ; this . mediaName = fileName ; this . attachName = "attach://" + fileName ; this . isNew = true ; return this ;
|
public class SVGAndroidRenderer { private void updateStyleForElement ( RendererState state , SvgElementBase obj ) { } }
|
boolean isRootSVG = ( obj . parent == null ) ; state . style . resetNonInheritingProperties ( isRootSVG ) ; // Apply the styles defined by style attributes on the element
if ( obj . baseStyle != null ) updateStyle ( state , obj . baseStyle ) ; // Apply the styles from any CSS files or < style > elements
if ( document . hasCSSRules ( ) ) { for ( CSSParser . Rule rule : document . getCSSRules ( ) ) { if ( CSSParser . ruleMatch ( this . ruleMatchContext , rule . selector , obj ) ) { updateStyle ( state , rule . style ) ; } } } // Apply the styles defined by the ' style ' attribute . They have the highest precedence .
if ( obj . style != null ) updateStyle ( state , obj . style ) ;
|
public class SnackBar { /** * Show this SnackBar . It will auto attach to the parent view .
* @ param parent Must be { @ linke android . widget . FrameLayout } or { @ link android . widget . RelativeLayout } */
public void show ( ViewGroup parent ) { } }
|
if ( mState == STATE_SHOWING || mState == STATE_DISMISSING ) return ; if ( getParent ( ) != parent ) { if ( getParent ( ) != null ) ( ( ViewGroup ) getParent ( ) ) . removeView ( this ) ; parent . addView ( this ) ; } show ( ) ;
|
public class AbstractTransitionBuilder { /** * Similar to scale ( float ) , but wait until the transition is about to start to perform the evaluation .
* @ param end
* @ return self */
public T delayScale ( @ FloatRange ( from = 0.0 ) float end ) { } }
|
getDelayedProcessor ( ) . addProcess ( SCALE , end ) ; return self ( ) ;
|
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public String convertResourceObjectIncludeObjTypeToString ( EDataType eDataType , Object instanceValue ) { } }
|
return instanceValue == null ? null : instanceValue . toString ( ) ;
|
public class WebAppPublisher { /** * Unpublish a web application .
* @ param webApp web aplication to be unpublished
* @ throws NullArgumentException if web app is null */
public void unpublish ( final WebApp webApp ) { } }
|
NullArgumentException . validateNotNull ( webApp , "Web app" ) ; LOG . debug ( "Unpublishing web application [{}]" , webApp ) ; final ServiceTracker < WebAppDependencyHolder , WebAppDependencyHolder > tracker = webApps . remove ( webApp ) ; if ( tracker != null ) { tracker . close ( ) ; }
|
public class ServiceDirectoryConfig { /** * Get the property object as float , or return defaultVal if property is not defined .
* @ param name
* property name .
* @ param defaultVal
* default property value .
* @ return
* property value as float , return defaultVal if property is undefined . */
public float getFloat ( String name , float defaultVal ) { } }
|
if ( this . configuration . containsKey ( name ) ) { return this . configuration . getFloat ( name ) ; } else { return defaultVal ; }
|
public class SyncPageLoader { /** * Constructor . */
public void init ( URL url , String strHtmlText , JEditorPane editorPane , BaseApplet applet , boolean bChangeCursor ) { } }
|
m_url = url ; m_editorPane = editorPane ; m_strHtmlText = strHtmlText ; m_applet = applet ; m_bChangeCursor = bChangeCursor ;
|
public class SwaptionAnalyticApproximationRebonato { /** * This function calculate the partial derivative < i > d log ( S ) / d log ( L < sub > k < / sub > ) < / i > for
* a given swap rate with respect to a vector of forward rates ( on a given forward rate tenor ) .
* It also returns some useful other quantities like the corresponding discout factors and swap annuities .
* @ param liborPeriodDiscretization The libor period discretization .
* @ param discountCurve The discount curve . If this parameter is null , the discount curve will be calculated from the forward curve .
* @ param forwardCurve The forward curve .
* @ param swapTenor The swap tenor .
* @ return A map containing the partial derivatives ( key " value " ) , the discount factors ( key " discountFactors " ) and the annuities ( key " annuities " ) as vectors of double [ ] ( indexed by forward rate tenor index starting at swap start ) */
public static Map < String , double [ ] > getLogSwaprateDerivative ( TimeDiscretization liborPeriodDiscretization , DiscountCurve discountCurve , ForwardCurve forwardCurve , double [ ] swapTenor ) { } }
|
/* * Small workaround for the case that the discount curve is not set . This part will be removed later . */
AnalyticModelFromCurvesAndVols model = null ; if ( discountCurve == null ) { discountCurve = new DiscountCurveFromForwardCurve ( forwardCurve . getName ( ) ) ; model = new AnalyticModelFromCurvesAndVols ( new Curve [ ] { forwardCurve , discountCurve } ) ; } double swapStart = swapTenor [ 0 ] ; double swapEnd = swapTenor [ swapTenor . length - 1 ] ; // Get the indices of the swap start and end on the forward rate tenor
int swapStartIndex = liborPeriodDiscretization . getTimeIndex ( swapStart ) ; int swapEndIndex = liborPeriodDiscretization . getTimeIndex ( swapEnd ) ; // Precalculate forward rates and discount factors . Note : the swap contains swapEndIndex - swapStartIndex forward rates
double [ ] forwardRates = new double [ swapEndIndex - swapStartIndex + 1 ] ; double [ ] discountFactors = new double [ swapEndIndex - swapStartIndex + 1 ] ; // Calculate discount factor at swap start
discountFactors [ 0 ] = discountCurve . getDiscountFactor ( model , swapStart ) ; // Calculate discount factors for swap period ends ( used for swap annuity )
for ( int liborPeriodIndex = swapStartIndex ; liborPeriodIndex < swapEndIndex ; liborPeriodIndex ++ ) { double libor = forwardCurve . getForward ( null , liborPeriodDiscretization . getTime ( liborPeriodIndex ) ) ; forwardRates [ liborPeriodIndex - swapStartIndex ] = libor ; discountFactors [ liborPeriodIndex - swapStartIndex + 1 ] = discountCurve . getDiscountFactor ( model , liborPeriodDiscretization . getTime ( liborPeriodIndex + 1 ) ) ; } // Precalculate swap annuities
double [ ] swapAnnuities = new double [ swapTenor . length - 1 ] ; double swapAnnuity = 0.0 ; for ( int swapPeriodIndex = swapTenor . length - 2 ; swapPeriodIndex >= 0 ; swapPeriodIndex -- ) { int periodEndIndex = liborPeriodDiscretization . getTimeIndex ( swapTenor [ swapPeriodIndex + 1 ] ) ; swapAnnuity += discountFactors [ periodEndIndex - swapStartIndex ] * ( swapTenor [ swapPeriodIndex + 1 ] - swapTenor [ swapPeriodIndex ] ) ; swapAnnuities [ swapPeriodIndex ] = swapAnnuity ; } int [ ] liborPeriodsInSwapPeriods = new int [ swapTenor . length - 1 ] ; Arrays . fill ( liborPeriodsInSwapPeriods , 0 ) ; for ( int liborPeriodIndex = swapStartIndex , swapPeriodIndex = 0 ; liborPeriodIndex < swapEndIndex ; liborPeriodIndex ++ ) { if ( liborPeriodDiscretization . getTime ( liborPeriodIndex ) >= swapTenor [ swapPeriodIndex + 1 ] ) { swapPeriodIndex ++ ; } liborPeriodsInSwapPeriods [ swapPeriodIndex ] ++ ; } // Precalculate weights : The formula is the one by Rebonato ( note : this formula can be improved easily , we provide it rather for testing / illustration .
double [ ] swapCovarianceWeights = new double [ swapEndIndex - swapStartIndex ] ; for ( int liborPeriodIndex = swapStartIndex , swapPeriodIndex = 0 ; liborPeriodIndex < swapEndIndex ; liborPeriodIndex ++ ) { if ( liborPeriodDiscretization . getTime ( liborPeriodIndex ) >= swapTenor [ swapPeriodIndex + 1 ] ) { swapPeriodIndex ++ ; } double swapAnnuityCurrent = swapAnnuities [ swapPeriodIndex ] ; double swapAnnuityNext = swapPeriodIndex < swapAnnuities . length - 1 ? swapAnnuities [ swapPeriodIndex + 1 ] : 0 ; swapCovarianceWeights [ liborPeriodIndex - swapStartIndex ] = ( swapAnnuityCurrent - swapAnnuityNext ) / swapAnnuity / liborPeriodsInSwapPeriods [ swapPeriodIndex ] ; } // Return results
Map < String , double [ ] > results = new HashMap < > ( ) ; results . put ( "values" , swapCovarianceWeights ) ; results . put ( "discountFactors" , discountFactors ) ; results . put ( "swapAnnuities" , swapAnnuities ) ; return results ;
|
public class AND { /** * Gets intersection of the generated elements by the member constraints .
* @ param match match to process
* @ param ind mapped indices
* @ return satisfying elements */
@ Override public Collection < BioPAXElement > generate ( Match match , int ... ind ) { } }
|
Collection < BioPAXElement > gen = new HashSet < BioPAXElement > ( con [ 0 ] . generate ( match , ind ) ) ; for ( int i = 1 ; i < con . length ; i ++ ) { if ( gen . isEmpty ( ) ) break ; gen . retainAll ( con [ i ] . generate ( match , ind ) ) ; } return gen ;
|
public class XStringForFSB { /** * Compares two strings lexicographically .
* @ param xstr the < code > String < / code > to be compared .
* @ return the value < code > 0 < / code > if the argument string is equal to
* this string ; a value less than < code > 0 < / code > if this string
* is lexicographically less than the string argument ; and a
* value greater than < code > 0 < / code > if this string is
* lexicographically greater than the string argument .
* @ exception java . lang . NullPointerException if < code > anotherString < / code >
* is < code > null < / code > . */
public int compareTo ( XMLString xstr ) { } }
|
int len1 = m_length ; int len2 = xstr . length ( ) ; int n = Math . min ( len1 , len2 ) ; FastStringBuffer fsb = fsb ( ) ; int i = m_start ; int j = 0 ; while ( n -- != 0 ) { char c1 = fsb . charAt ( i ) ; char c2 = xstr . charAt ( j ) ; if ( c1 != c2 ) { return c1 - c2 ; } i ++ ; j ++ ; } return len1 - len2 ;
|
public class SQLStmt { /** * Get the text of the SQL statement represented .
* @ return String containing the text of the SQL statement represented . */
public String getText ( ) { } }
|
if ( sqlTextStr == null ) { sqlTextStr = new String ( sqlText , Constants . UTF8ENCODING ) ; } return sqlTextStr ;
|
public class BootstrapMojo { private void copyCelerioXsd ( ) throws IOException { } }
|
File xsdDir = new File ( appName + "/src/main/config/celerio-maven-plugin" ) ; xsdDir . mkdirs ( ) ; File celerioXsdFile = new File ( xsdDir , "celerio.xsd" ) ; File nonamespaceXsdFile = new File ( xsdDir , "nonamespace.xsd" ) ; getLog ( ) . info ( "Copy Celerio configuration xsd files to " + appName + "/src/main/config/celerio-maven-plugin" ) ; writeStringToFile ( celerioXsdFile , XsdHelper . getCelerioXsdAsString ( ) , "UTF-8" ) ; writeStringToFile ( nonamespaceXsdFile , XsdHelper . getNonamespaceXsdAsString ( ) , "UTF-8" ) ;
|
public class PluginFileDependency { /** * Creates an instance of the class that indicates the file is within tcMenu itself . Don ' t use for
* new plugins , prefer to package arduino code in the plugin or a new library .
* @ param file the file name
* @ return a new instance */
public static PluginFileDependency fileInTcMenu ( String file ) { } }
|
return new PluginFileDependency ( file , PackagingType . WITHIN_TCMENU , Map . of ( ) ) ;
|
public class Util { /** * Checks whether the given resource is a Java source file .
* @ param resource
* The resource to check .
* @ return < code > true < / code > if the given resource is a Java source file ,
* < code > false < / code > otherwise . */
public static boolean isJavaFile ( IResource resource ) { } }
|
if ( resource == null || ( resource . getType ( ) != IResource . FILE ) ) { return false ; } String ex = resource . getFileExtension ( ) ; return "java" . equalsIgnoreCase ( ex ) ; // $ NON - NLS - 1 $
|
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EClass getIfcRoundedRectangleProfileDef ( ) { } }
|
if ( ifcRoundedRectangleProfileDefEClass == null ) { ifcRoundedRectangleProfileDefEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 582 ) ; } return ifcRoundedRectangleProfileDefEClass ;
|
public class DefaultServiceRegistry { /** * - - - PING / PONG HANDLING - - - */
@ Override public Promise ping ( long timeoutMillis , String nodeID ) { } }
|
// Local node ?
if ( this . nodeID . equals ( nodeID ) ) { Tree rsp = new Tree ( ) ; long time = System . currentTimeMillis ( ) ; rsp . put ( "time" , time ) ; rsp . put ( "arrived" , time ) ; return Promise . resolve ( rsp ) ; } // Do we have a transporter ?
if ( transporter == null ) { return Promise . reject ( new ServiceNotAvailableError ( nodeID , "ping" ) ) ; } // Create new promise
Promise promise = new Promise ( ) ; // Set timeout
long timeoutAt ; if ( timeoutMillis > 0 ) { timeoutAt = System . currentTimeMillis ( ) + timeoutMillis ; } else { timeoutAt = 0 ; } // Register promise ( timeout and response handling )
String id = uid . nextUID ( ) ; register ( id , promise , timeoutAt , nodeID , "ping" ) ; // Send request via transporter
Tree message = transporter . createPingPacket ( id ) ; transporter . publish ( PACKET_PING , nodeID , message ) ; // Return promise
return promise ;
|
public class CmsContentService { /** * Returns the widget class name . < p >
* @ param settingsWidget the settings widget name
* @ return the widget class name */
private String getWidgetName ( String settingsWidget ) { } }
|
if ( WIDGET_MAPPINGS . containsKey ( settingsWidget ) ) { return WIDGET_MAPPINGS . get ( settingsWidget ) . getName ( ) ; } else { return CmsInputWidget . class . getName ( ) ; }
|
public class CommonFileUtils { /** * 关闭对应的文件流
* @ param inputStream 待关闭的文件流
* @ param filePath 对应的文件名
* @ throws IOException 关闭时发生IO异常 , 则抛出 */
public static void closeFileStream ( InputStream inputStream , String filePath ) { } }
|
try { if ( inputStream != null ) { inputStream . close ( ) ; } } catch ( IOException e ) { LOG . error ( "close file {} occur an IOExcpetion {}" , filePath , e ) ; }
|
public class AbstractXtypeRuntimeModule { /** * contributed by org . eclipse . xtext . xtext . generator . formatting . Formatter2Fragment2 */
public void configureFormatterPreferences ( Binder binder ) { } }
|
binder . bind ( IPreferenceValuesProvider . class ) . annotatedWith ( FormatterPreferences . class ) . to ( FormatterPreferenceValuesProvider . class ) ;
|
public class TargetsMngrImpl { /** * Diagnostics */
@ Override public List < TargetUsageItem > findUsageStatistics ( String targetId ) { } }
|
// Get usage first
List < String > appNames ; synchronized ( LOCK ) { appNames = applicationsThatUse ( targetId ) ; } // Now , let ' s build the result
Set < TargetUsageItem > result = new HashSet < > ( ) ; for ( Map . Entry < InstanceContext , String > entry : this . instanceToCachedId . entrySet ( ) ) { if ( ! entry . getValue ( ) . equals ( targetId ) ) continue ; String appName = entry . getKey ( ) . getName ( ) ; TargetUsageItem item = new TargetUsageItem ( ) ; item . setName ( appName ) ; item . setVersion ( entry . getKey ( ) . getQualifier ( ) ) ; item . setReferencing ( true ) ; item . setUsing ( appNames . contains ( appName ) ) ; result . add ( item ) ; } return new ArrayList < > ( result ) ;
|
public class HttpBuilder { /** * Executes an asynchronous TRACE request on the configured URI ( asynchronous alias to ` trace ( Class , Consumer ) ` ) , with additional configuration
* provided by the configuration function . The result will be cast to the specified ` type ` .
* This method is generally used for Java - specific configuration .
* [ source , groovy ]
* HttpBuilder http = HttpBuilder . configure ( config - > {
* config . getRequest ( ) . setUri ( " http : / / localhost : 10101 " ) ;
* String result = http . traceAsync ( String . class , config - > {
* config . getRequest ( ) . getUri ( ) . setPath ( " / foo " ) ;
* The ` configuration ` { @ link Consumer } allows additional configuration for this request based on the { @ link HttpConfig } interface .
* @ param type the type of the response content
* @ param configuration the additional configuration function ( delegated to { @ link HttpConfig } )
* @ return the resulting content cast to the specified type wrapped in a { @ link CompletableFuture } */
public < T > CompletableFuture < T > traceAsync ( final Class < T > type , final Consumer < HttpConfig > configuration ) { } }
|
return CompletableFuture . supplyAsync ( ( ) -> trace ( type , configuration ) , getExecutor ( ) ) ;
|
public class ShardingRule { /** * Get logic table names base on actual table name .
* @ param actualTableName actual table name
* @ return logic table name */
public Collection < String > getLogicTableNames ( final String actualTableName ) { } }
|
Collection < String > result = new LinkedList < > ( ) ; for ( TableRule each : tableRules ) { if ( each . isExisted ( actualTableName ) ) { result . add ( each . getLogicTable ( ) ) ; } } return result ;
|
public class FunctionsInner { /** * Updates an existing function under an existing streaming job . This can be used to partially update ( ie . update one or two properties ) a function without affecting the rest the job or function definition .
* @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
* @ param jobName The name of the streaming job .
* @ param functionName The name of the function .
* @ param function A function object . The properties specified here will overwrite the corresponding properties in the existing function ( ie . Those properties will be updated ) . Any properties that are set to null here will mean that the corresponding property in the existing function will remain the same and not change as a result of this PATCH operation .
* @ param ifMatch The ETag of the function . Omit this value to always overwrite the current function . Specify the last - seen ETag value to prevent accidentally overwritting concurrent changes .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the FunctionInner object */
public Observable < ServiceResponseWithHeaders < FunctionInner , FunctionsUpdateHeaders > > updateWithServiceResponseAsync ( String resourceGroupName , String jobName , String functionName , FunctionInner function , String ifMatch ) { } }
|
if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( jobName == null ) { throw new IllegalArgumentException ( "Parameter jobName is required and cannot be null." ) ; } if ( functionName == null ) { throw new IllegalArgumentException ( "Parameter functionName is required and cannot be null." ) ; } if ( function == null ) { throw new IllegalArgumentException ( "Parameter function is required and cannot be null." ) ; } if ( this . client . apiVersion ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiVersion() is required and cannot be null." ) ; } Validator . validate ( function ) ; return service . update ( this . client . subscriptionId ( ) , resourceGroupName , jobName , functionName , function , ifMatch , this . client . apiVersion ( ) , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponseWithHeaders < FunctionInner , FunctionsUpdateHeaders > > > ( ) { @ Override public Observable < ServiceResponseWithHeaders < FunctionInner , FunctionsUpdateHeaders > > call ( Response < ResponseBody > response ) { try { ServiceResponseWithHeaders < FunctionInner , FunctionsUpdateHeaders > clientResponse = updateDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
|
public class FileStorage { /** * Opens a file for reading using the properties of the event and streams
* its content as a sequence of { @ link Output } events with the
* end of record flag set in the last event . All generated events are
* considered responses to this event and therefore fired using the event
* processor from the event ' s I / O subchannel .
* @ param event the event
* @ throws InterruptedException if the execution was interrupted */
@ Handler @ SuppressWarnings ( { } }
|
"PMD.AvoidInstantiatingObjectsInLoops" , "PMD.AccessorClassGeneration" , "PMD.AvoidDuplicateLiterals" } ) public void onStreamFile ( StreamFile event ) throws InterruptedException { if ( Arrays . asList ( event . options ( ) ) . contains ( StandardOpenOption . WRITE ) ) { throw new IllegalArgumentException ( "Cannot stream file opened for writing." ) ; } for ( IOSubchannel channel : event . channels ( IOSubchannel . class ) ) { if ( inputWriters . containsKey ( channel ) ) { channel . respond ( new IOError ( event , new IllegalStateException ( "File is already open." ) ) ) ; } else { new FileStreamer ( event , channel ) ; } }
|
public class DefaultBindFuture { /** * Combine futures in a way that minimizes cost ( no object creation ) for the common case where
* both have already been fulfilled . */
public static BindFuture combineFutures ( BindFuture future1 , BindFuture future2 ) { } }
|
if ( future1 == null || future1 . isBound ( ) ) { return future2 ; } else if ( future2 == null || future2 . isBound ( ) ) { return future1 ; } else { return new CompositeBindFuture ( Arrays . asList ( future1 , future2 ) ) ; }
|
public class Assert { /** * Assert that an object is not { @ code null } .
* < pre class = " code " >
* Assert . notNull ( clazz , ( ) - & gt ; " The class ' " + clazz . getName ( ) + " ' must not be null " ) ;
* < / pre >
* @ param object the object to check
* @ param messageSupplier a supplier for the exception message to use if the assertion fails
* @ throws IllegalArgumentException if the object is { @ code null }
* @ since 5.0 */
public static void notNull ( @ Nullable final Object object , final Supplier < String > messageSupplier ) { } }
|
if ( object == null ) { throw new IllegalArgumentException ( Assert . nullSafeGet ( messageSupplier ) ) ; }
|
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EClass getIfcPresentationLayerWithStyle ( ) { } }
|
if ( ifcPresentationLayerWithStyleEClass == null ) { ifcPresentationLayerWithStyleEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 448 ) ; } return ifcPresentationLayerWithStyleEClass ;
|
public class AutoCompleteTextFieldComponent { /** * Sets the spinner as progress indicator . */
public void showValidationInProgress ( ) { } }
|
validationIcon . setValue ( null ) ; validationIcon . addStyleName ( "show-status-label" ) ; validationIcon . setStyleName ( SPUIStyleDefinitions . TARGET_FILTER_SEARCH_PROGRESS_INDICATOR_STYLE ) ;
|
public class BpmnParse { /** * Parses all sequence flow of a scope .
* @ param processElement
* The ' process ' element wherein the sequence flow are defined .
* @ param scope
* The scope to which the sequence flow must be added .
* @ param compensationHandlers */
public void parseSequenceFlow ( Element processElement , ScopeImpl scope , Map < String , Element > compensationHandlers ) { } }
|
// Iterate over every <sequenceFlow> child of the process element; for each flow,
// resolve link-event redirection, validate both endpoints, then wire a transition.
for ( Element sequenceFlowElement : processElement . elements ( "sequenceFlow" ) ) { String id = sequenceFlowElement . attribute ( "id" ) ; String sourceRef = sequenceFlowElement . attribute ( "sourceRef" ) ; String destinationRef = sequenceFlowElement . attribute ( "targetRef" ) ; // check if destination is a throwing link event ( event source ) which mean
// we have
// to target the catching link event ( event target ) here :
if ( eventLinkSources . containsKey ( destinationRef ) ) { String linkName = eventLinkSources . get ( destinationRef ) ; destinationRef = eventLinkTargets . get ( linkName ) ; if ( destinationRef == null ) { addError ( "sequence flow points to link event source with name '" + linkName + "' but no event target with that name exists. Most probably your link events are not configured correctly." , sequenceFlowElement ) ; // we cannot do anything useful now
return ; } // Reminder : Maybe we should log a warning if we use intermediate link
// events which are not used ?
// e . g . we have a catching event without the corresponding throwing one .
// not done for the moment as it does not break executability
} // Implicit check : sequence flow cannot cross ( sub ) process boundaries : we
// don ' t do a processDefinition . findActivity here
// Validate both endpoints: flows touching compensation handlers, event-based
// gateways, or event subprocesses are rejected with dedicated error messages;
// only then is the transition actually created and handed to parse listeners.
ActivityImpl sourceActivity = scope . findActivityAtLevelOfSubprocess ( sourceRef ) ; ActivityImpl destinationActivity = scope . findActivityAtLevelOfSubprocess ( destinationRef ) ; if ( ( sourceActivity == null && compensationHandlers . containsKey ( sourceRef ) ) || ( sourceActivity != null && sourceActivity . isCompensationHandler ( ) ) ) { addError ( "Invalid outgoing sequence flow of compensation activity '" + sourceRef + "'. A compensation activity should not have an incoming or outgoing sequence flow." , sequenceFlowElement ) ; } else if ( ( destinationActivity == null && compensationHandlers . containsKey ( destinationRef ) ) || ( destinationActivity != null && destinationActivity . isCompensationHandler ( ) ) ) { addError ( "Invalid incoming sequence flow of compensation activity '" + destinationRef + "'. A compensation activity should not have an incoming or outgoing sequence flow." , sequenceFlowElement ) ; } else if ( sourceActivity == null ) { addError ( "Invalid source '" + sourceRef + "' of sequence flow '" + id + "'" , sequenceFlowElement ) ; } else if ( destinationActivity == null ) { addError ( "Invalid destination '" + destinationRef + "' of sequence flow '" + id + "'" , sequenceFlowElement ) ; } else if ( sourceActivity . getActivityBehavior ( ) instanceof EventBasedGatewayActivityBehavior ) { // ignore
} else if ( destinationActivity . getActivityBehavior ( ) instanceof IntermediateCatchEventActivityBehavior && ( destinationActivity . getEventScope ( ) != null ) && ( destinationActivity . getEventScope ( ) . getActivityBehavior ( ) instanceof EventBasedGatewayActivityBehavior ) ) { addError ( "Invalid incoming sequenceflow for intermediateCatchEvent with id '" + destinationActivity . getId ( ) + "' connected to an event-based gateway." , sequenceFlowElement ) ; } else if ( sourceActivity . getActivityBehavior ( ) instanceof SubProcessActivityBehavior && sourceActivity . isTriggeredByEvent ( ) ) { addError ( "Invalid outgoing sequence flow of event subprocess" , sequenceFlowElement ) ; } else if ( destinationActivity . getActivityBehavior ( ) instanceof SubProcessActivityBehavior && destinationActivity . isTriggeredByEvent ( ) ) { addError ( "Invalid incoming sequence flow of event subprocess" , sequenceFlowElement ) ; } else { if ( getMultiInstanceScope ( sourceActivity ) != null ) { sourceActivity = getMultiInstanceScope ( sourceActivity ) ; } if ( getMultiInstanceScope ( destinationActivity ) != null ) { destinationActivity = getMultiInstanceScope ( destinationActivity ) ; } TransitionImpl transition = sourceActivity . createOutgoingTransition ( id ) ; sequenceFlows . put ( id , transition ) ; transition . setProperty ( "name" , sequenceFlowElement . attribute ( "name" ) ) ; transition . setProperty ( "documentation" , parseDocumentation ( sequenceFlowElement ) ) ; transition . setDestination ( destinationActivity ) ; parseSequenceFlowConditionExpression ( sequenceFlowElement , transition ) ; parseExecutionListenersOnTransition ( sequenceFlowElement , transition ) ; for ( BpmnParseListener parseListener : parseListeners ) { parseListener . parseSequenceFlow ( sequenceFlowElement , scope , transition ) ; } } }
|
public class NameSpaceAddressManager { /** * Sets the backing configuration source */
public void setConfSource ( Configurable src ) { } }
|
validateConfigFile ( src . getConf ( ) ) ; confSrc = src ; zkClient = new AvatarZooKeeperClient ( confSrc . getConf ( ) , null , true ) ;
|
public class SibRaStaticDestinationEndpointActivation { /** * Called on deactivation of this endpoint . */
void deactivate ( ) { } }
|
final String methodName = "deactivate" ; if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEntryEnabled ( ) ) { SibTr . entry ( this , TRACE , methodName ) ; } super . deactivate ( ) ; synchronized ( this ) { if ( _remoteConnection != null ) { _remoteConnection . close ( ) ; } } _timer . cancel ( ) ; // PK54585
if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEntryEnabled ( ) ) { SibTr . exit ( this , TRACE , methodName ) ; }
|
public class XMLPrinter { /** * Rule format based on { @ link org . sonar . api . server . rule . RulesDefinitionXmlLoader } */
private static void printAsXML ( Rule rule , StringBuilder xmlStringBuilder ) { } }
|
// Emits one <rule> element (optional "since" comment, key, severity, name,
// internalKey, description, tags, and <param> entries sorted by key) followed
// by a trailing blank line. Free-text fields are wrapped in CDATA sections.
// NOTE(review): CDATA content is not escaped for a literal "]]>" inside the
// rule name/description — confirm upstream data can never contain it.
if ( rule . version != null ) { xmlStringBuilder . append ( " <!-- since " + rule . version + " -->" ) ; xmlStringBuilder . append ( LINE_SEPARATOR ) ; } xmlStringBuilder . append ( " <rule>" ) ; xmlStringBuilder . append ( LINE_SEPARATOR ) ; xmlStringBuilder . append ( " <key>" + rule . fixedRuleKey ( ) + "</key>" ) ; xmlStringBuilder . append ( LINE_SEPARATOR ) ; xmlStringBuilder . append ( " <severity>" + rule . severity + "</severity>" ) ; xmlStringBuilder . append ( LINE_SEPARATOR ) ; xmlStringBuilder . append ( " <name><![CDATA[" + rule . name + "]]></name>" ) ; xmlStringBuilder . append ( LINE_SEPARATOR ) ; xmlStringBuilder . append ( " <internalKey><![CDATA[" + rule . internalKey + "]]></internalKey>" ) ; xmlStringBuilder . append ( LINE_SEPARATOR ) ; xmlStringBuilder . append ( " <description><![CDATA[" + rule . description + "]]></description>" ) ; xmlStringBuilder . append ( LINE_SEPARATOR ) ; if ( ! rule . tags . isEmpty ( ) ) { for ( String tag : rule . tags ) { xmlStringBuilder . append ( " <tag>" + tag + "</tag>" ) ; xmlStringBuilder . append ( LINE_SEPARATOR ) ; } } if ( ! rule . parameters . isEmpty ( ) ) { List < RuleParameter > sortedParameters = Lists . newArrayList ( rule . parameters ) ; Collections . sort ( sortedParameters , new Comparator < RuleParameter > ( ) { @ Override public int compare ( RuleParameter o1 , RuleParameter o2 ) { return o1 . key . compareTo ( o2 . key ) ; } } ) ; for ( RuleParameter parameter : sortedParameters ) { xmlStringBuilder . append ( " <param>" ) ; xmlStringBuilder . append ( LINE_SEPARATOR ) ; xmlStringBuilder . append ( " <key>" + parameter . key + "</key>" ) ; xmlStringBuilder . append ( LINE_SEPARATOR ) ; if ( StringUtils . isNotBlank ( parameter . description ) ) { xmlStringBuilder . append ( " <description><![CDATA[" + parameter . description + "]]></description>" ) ; xmlStringBuilder . append ( LINE_SEPARATOR ) ; } if ( StringUtils . isNotBlank ( parameter . defaultValue ) && ! "null" . 
equals ( parameter . defaultValue ) ) { xmlStringBuilder . append ( " <defaultValue>" + parameter . defaultValue + "</defaultValue>" ) ; xmlStringBuilder . append ( LINE_SEPARATOR ) ; } xmlStringBuilder . append ( " </param>" ) ; xmlStringBuilder . append ( LINE_SEPARATOR ) ; } } xmlStringBuilder . append ( " </rule>" ) ; xmlStringBuilder . append ( LINE_SEPARATOR ) ; xmlStringBuilder . append ( LINE_SEPARATOR ) ;
|
public class JcrNodeTypeManager { /** * Returns the node type with the given name ( if one exists )
* @ param nodeTypeName the name of the node type to be returned
* @ return the node type with the given name ( if one exists )
* @ see NodeTypes # getNodeType ( Name ) */
JcrNodeType getNodeType ( Name nodeTypeName ) { } }
|
JcrNodeType nodeType = nodeTypes ( ) . getNodeType ( nodeTypeName ) ; if ( nodeType != null ) { nodeType = nodeType . with ( context ( ) , session ) ; } return nodeType ;
|
public class Tomcat { /** * Creates a Tomcat context for Fedora in
* $ CATALINA _ HOME / conf / Catalina / localhost which sets the fedora . home system
* property to the installer - provided value . */
protected void installFedoraContext ( ) throws InstallationFailedException { } }
|
File contextDir = new File ( getConf ( ) . getPath ( ) + File . separator + "Catalina" + File . separator + "localhost" ) ; contextDir . mkdirs ( ) ; try { String content = IOUtils . toString ( this . getClass ( ) . getResourceAsStream ( "/webapp-context/context.xml" ) ) . replace ( "${fedora.home}" , getOptions ( ) . getValue ( InstallOptions . FEDORA_HOME ) ) ; String name = getOptions ( ) . getValue ( InstallOptions . FEDORA_APP_SERVER_CONTEXT ) + ".xml" ; FileOutputStream out = new FileOutputStream ( new File ( contextDir , name ) ) ; out . write ( content . getBytes ( ) ) ; out . close ( ) ; } catch ( Exception e ) { throw new InstallationFailedException ( e . getMessage ( ) , e ) ; }
|
public class BackupResourceVaultConfigsInner { /** * Updates vault security config .
* @ param vaultName The name of the recovery services vault .
* @ param resourceGroupName The name of the resource group where the recovery services vault is present .
* @ param parameters resource config request
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the BackupResourceVaultConfigResourceInner object if successful . */
public BackupResourceVaultConfigResourceInner update ( String vaultName , String resourceGroupName , BackupResourceVaultConfigResourceInner parameters ) { } }
|
return updateWithServiceResponseAsync ( vaultName , resourceGroupName , parameters ) . toBlocking ( ) . single ( ) . body ( ) ;
|
public class FacesConfigTypeImpl { /** * If not already created , a new < code > referenced - bean < / code > element will be created and returned .
* Otherwise , the first existing < code > referenced - bean < / code > element will be returned .
* @ return the instance defined for the element < code > referenced - bean < / code > */
public FacesConfigReferencedBeanType < FacesConfigType < T > > getOrCreateReferencedBean ( ) { } }
|
List < Node > nodeList = childNode . get ( "referenced-bean" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new FacesConfigReferencedBeanTypeImpl < FacesConfigType < T > > ( this , "referenced-bean" , childNode , nodeList . get ( 0 ) ) ; } return createReferencedBean ( ) ;
|
public class ParentRunner { /** * Returns a { @ link Statement } : run all non - overridden { @ code @ BeforeClass } methods on this class
* and superclasses before executing { @ code statement } ; if any throws an
* Exception , stop execution and pass the exception on . */
protected Statement withBeforeClasses ( Statement statement ) { } }
|
List < FrameworkMethod > befores = testClass . getAnnotatedMethods ( BeforeClass . class ) ; return befores . isEmpty ( ) ? statement : new RunBefores ( statement , befores , null ) ;
|
public class StatementExecutor { /** * Return the object associated with the id or null if none . This does a SQL
* { @ code SELECT col1 , col2 , . . . FROM . . . WHERE . . . = id } type query . */
public T queryForId ( DatabaseConnection databaseConnection , ID id , ObjectCache objectCache ) throws SQLException { } }
|
if ( mappedQueryForId == null ) { mappedQueryForId = MappedQueryForFieldEq . build ( dao , tableInfo , null ) ; } return mappedQueryForId . execute ( databaseConnection , id , objectCache ) ;
|
public class UIViewParameter { /** * < p class = " changed _ added _ 2_0 " > Call through to superclass { @ link
* UIInput # updateModel } then take the additional action of pushing
* the value into request scope if and only if the value is not a
* value expression , is valid , and the local value was set on this
* lifecycle execution . < / p >
* @ since 2.0 */
@ Override public void updateModel ( FacesContext context ) { } }
|
super . updateModel ( context ) ; if ( ! hasValueExpression ( ) && isValid ( ) && isLocalValueSet ( ) ) { // QUESTION should this be done even when a value expression is present ?
// ANSWER : I don ' t see why not .
context . getExternalContext ( ) . getRequestMap ( ) . put ( getName ( ) , getLocalValue ( ) ) ; }
|
public class ImageIOHelper { /** * Converts < code > BufferedImage < / code > to < code > ByteBuffer < / code > .
* @ param bi Input image
* @ return pixel data */
public static ByteBuffer convertImageData ( BufferedImage bi ) { } }
|
DataBuffer buff = bi . getRaster ( ) . getDataBuffer ( ) ; // ClassCastException thrown if buff not instanceof DataBufferByte because raster data is not necessarily bytes .
// Convert the original buffered image to grayscale .
if ( ! ( buff instanceof DataBufferByte ) ) { BufferedImage grayscaleImage = ImageHelper . convertImageToGrayscale ( bi ) ; buff = grayscaleImage . getRaster ( ) . getDataBuffer ( ) ; } byte [ ] pixelData = ( ( DataBufferByte ) buff ) . getData ( ) ; // return ByteBuffer . wrap ( pixelData ) ;
ByteBuffer buf = ByteBuffer . allocateDirect ( pixelData . length ) ; buf . order ( ByteOrder . nativeOrder ( ) ) ; buf . put ( pixelData ) ; ( ( Buffer ) buf ) . flip ( ) ; return buf ;
|
public class CronUtils { /** * Converting valid SauronSoftware cron expression to valid Quartz one .
* The conversions are the following :
* < ul > < li > add & quot ; seconds & quot ; part ; < / li >
* < li > numbers in & quot ; day of week & quot ; started from 1 , not from 0 as in Sauron ; < / li >
* < li > & quot ; * & # 47 ; interval & quot ; items converted to
* & quot ; / interval & quot ; items ; < / li >
* < li > one of date and day of week should be a question mark . < / li >
* < / ul >
* @ param sauronExprValid SauronSoftware cron expression
* @ returnSimilar Quartz cron expression */
public static String unpackSchedule ( String sauronExpr ) { } }
|
if ( sauronExpr == null ) return null ; String [ ] exprElems = sauronExpr . trim ( ) . split ( "\\s+" ) ; if ( exprElems . length == 5 ) { // 1 . Increase number od days in " days of week "
exprElems [ 4 ] = increaseDoW ( exprElems [ 4 ] ) ; // 2 . Cut right end of an interval in repeating items
exprElems [ 0 ] = shrinkRepeating ( exprElems [ 0 ] , "0" ) ; exprElems [ 1 ] = shrinkRepeating ( exprElems [ 1 ] , "0" ) ; exprElems [ 2 ] = shrinkRepeating ( exprElems [ 2 ] , "1" ) ; exprElems [ 3 ] = shrinkRepeating ( exprElems [ 3 ] , "1" ) ; exprElems [ 4 ] = shrinkRepeating ( exprElems [ 4 ] , "SUN" ) ; // 3 . " Last " processing and question marks inserting
if ( ! "*" . equals ( exprElems [ 4 ] ) ) { if ( exprElems [ 2 ] . indexOf ( 'L' ) >= 0 && exprElems [ 4 ] . indexOf ( '-' ) == - 1 && exprElems [ 4 ] . indexOf ( '/' ) == - 1 ) { exprElems [ 4 ] = exprElems [ 4 ] + "L" ; } exprElems [ 2 ] = "?" ; } else { exprElems [ 4 ] = "?" ; } // 4 . Add seconds part
return concat ( ' ' , "0" , exprElems [ 0 ] , exprElems [ 1 ] , exprElems [ 2 ] , exprElems [ 3 ] , exprElems [ 4 ] ) ; } else { return sauronExpr ; }
|
public class JobSchedulesImpl { /** * Deletes a job schedule from the specified account .
* When you delete a job schedule , this also deletes all jobs and tasks under that schedule . When tasks are deleted , all the files in their working directories on the compute nodes are also deleted ( the retention period is ignored ) . The job schedule statistics are no longer accessible once the job schedule is deleted , though they are still counted towards account lifetime statistics .
* @ param jobScheduleId The ID of the job schedule to delete .
* @ param jobScheduleDeleteOptions Additional parameters for the operation
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws BatchErrorException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */
public void delete ( String jobScheduleId , JobScheduleDeleteOptions jobScheduleDeleteOptions ) { } }
|
deleteWithServiceResponseAsync ( jobScheduleId , jobScheduleDeleteOptions ) . toBlocking ( ) . single ( ) . body ( ) ;
|
public class DefaultCloudController { /** * Events */
@ Override public RestCollection < Event > getEvents ( Token token , EventQueryAttribute queryAttribute , String queryValue ) { } }
|
final ResultIterator < Event > iterator = new ResultIterator < > ( token , V2_EVENTS , Event . class , queryAttribute , queryValue ) ; return new RestCollection < > ( iterator . getSize ( ) , iterator ) ;
|
public class Record { /** * Pop the record number from the front of the list , and parse it to ensure that
* it is a valid integer .
* @ param list MPX record */
private void setRecordNumber ( LinkedList < String > list ) { } }
|
try { String number = list . remove ( 0 ) ; m_recordNumber = Integer . valueOf ( number ) ; } catch ( NumberFormatException ex ) { // Malformed MPX file : the record number isn ' t a valid integer
// Catch the exception here , leaving m _ recordNumber as null
// so we will skip this record entirely .
}
|
public class CPDefinitionSpecificationOptionValuePersistenceImpl { /** * Returns the first cp definition specification option value in the ordered set where CPDefinitionId = & # 63 ; and CPOptionCategoryId = & # 63 ; .
* @ param CPDefinitionId the cp definition ID
* @ param CPOptionCategoryId the cp option category ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching cp definition specification option value , or < code > null < / code > if a matching cp definition specification option value could not be found */
@ Override public CPDefinitionSpecificationOptionValue fetchByC_COC_First ( long CPDefinitionId , long CPOptionCategoryId , OrderByComparator < CPDefinitionSpecificationOptionValue > orderByComparator ) { } }
|
List < CPDefinitionSpecificationOptionValue > list = findByC_COC ( CPDefinitionId , CPOptionCategoryId , 0 , 1 , orderByComparator ) ; if ( ! list . isEmpty ( ) ) { return list . get ( 0 ) ; } return null ;
|
public class DBIDUtil { /** * Compute the set intersection of two sets .
* @ param first First set
* @ param second Second set
* @ return result . */
private static ModifiableDBIDs internalIntersection ( DBIDs first , DBIDs second ) { } }
|
second = second . size ( ) > 16 && ! ( second instanceof SetDBIDs ) ? newHashSet ( second ) : second ; ModifiableDBIDs inter = newHashSet ( first . size ( ) ) ; for ( DBIDIter it = first . iter ( ) ; it . valid ( ) ; it . advance ( ) ) { if ( second . contains ( it ) ) { inter . add ( it ) ; } } return inter ;
|
public class GrouperEntityGroupStore { /** * / * ( non - Javadoc )
* @ see org . apereo . portal . groups . IEntityGroupStore # findEntitiesForGroup ( org . apereo . portal . groups . IEntityGroup ) */
@ Override @ SuppressWarnings ( "unchecked" ) public Iterator findEntitiesForGroup ( IEntityGroup group ) throws GroupsException { } }
|
// Queries Grouper web services for the members of the given group and returns
// them as leaf IGroupMember entities. Group-typed subjects (source "g:gsa") are
// skipped, so only persons are returned. Any failure is logged as a warning and
// an empty iterator is returned — the lookup is best-effort and never
// propagates the underlying exception to the caller.
if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "Searching Grouper for members of the group with key: " + group . getKey ( ) ) ; } try { // execute a search for members of the specified group
GcGetMembers getGroupsMembers = new GcGetMembers ( ) ; getGroupsMembers . addGroupName ( group . getLocalKey ( ) ) ; getGroupsMembers . assignIncludeSubjectDetail ( true ) ; WsGetMembersResults results = getGroupsMembers . execute ( ) ; if ( results == null || results . getResults ( ) == null || results . getResults ( ) . length == 0 || results . getResults ( ) [ 0 ] . getWsSubjects ( ) == null ) { LOGGER . debug ( "No members found for Grouper group with key " + group . getLocalKey ( ) ) ; return Collections . < IGroupMember > emptyList ( ) . iterator ( ) ; } WsSubject [ ] gInfos = results . getResults ( ) [ 0 ] . getWsSubjects ( ) ; final List < IGroupMember > members = new ArrayList < IGroupMember > ( gInfos . length ) ; // add each result to the member list
for ( WsSubject gInfo : gInfos ) { // if the member is not a group ( aka person )
if ( ! StringUtils . equals ( gInfo . getSourceId ( ) , "g:gsa" ) ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "creating leaf member:" + gInfo . getId ( ) + " and name: " + gInfo . getName ( ) + " from group: " + group . getLocalKey ( ) ) ; } // use the name instead of id as it shows better in the display
IGroupMember member = new EntityImpl ( gInfo . getName ( ) , IPerson . class ) ; members . add ( member ) ; } } // return an iterator for the assembled group
return members . iterator ( ) ; } catch ( Exception e ) { LOGGER . warn ( "Exception while attempting to retrieve " + "member entities of group with key " + group . getKey ( ) + " from Grouper web services: " + e . getMessage ( ) ) ; return Collections . < IGroupMember > emptyList ( ) . iterator ( ) ; }
|
public class JavascriptRuntime { /** * Execute the specified command returning a value ( if any )
* @ param command The JavaScript command to execute
* @ return The underlying JavaScript object that was returned by the script . */
@ Override public JSObject execute ( String command ) { } }
|
Object returnValue = engine . executeScript ( command ) ; if ( returnValue instanceof JSObject ) { return ( JSObject ) returnValue ; } return null ;
|
public class Roster { /** * Remove a subscribe listener . Also restores the previous subscription mode
* state , if the last listener got removed .
* @ param subscribeListener
* the subscribe listener to remove .
* @ return < code > true < / code > if the listener registered and got removed .
* @ since 4.2 */
public boolean removeSubscribeListener ( SubscribeListener subscribeListener ) { } }
|
boolean removed = subscribeListeners . remove ( subscribeListener ) ; if ( removed && subscribeListeners . isEmpty ( ) ) { setSubscriptionMode ( previousSubscriptionMode ) ; } return removed ;
|
public class Payload { /** * Puts a property in a JSONObject , while possibly checking for estimated payload size violation .
* @ param propertyName the name of the property to use for calculating the estimation
* @ param propertyValue the value of the property to use for calculating the estimation
* @ param object the JSONObject to put the property in
* @ param opt true to use putOpt , false to use put
* @ throws JSONException */
protected void put ( String propertyName , Object propertyValue , JSONObject object , boolean opt ) throws JSONException { } }
|
// Best-effort size guard: when size estimation is enabled, re-throw only the
// dedicated PayloadMaxSizeProbablyExceededException; any other exception raised
// during the estimation itself is deliberately swallowed so that a failing
// estimate can never prevent the property from being added. The property is
// then stored via putOpt (null-tolerant) or put, per the "opt" flag.
try { if ( isPayloadSizeEstimatedWhenAdding ( ) ) { int maximumPayloadSize = getMaximumPayloadSize ( ) ; int estimatedPayloadSize = estimatePayloadSizeAfterAdding ( propertyName , propertyValue ) ; boolean estimatedToExceed = estimatedPayloadSize > maximumPayloadSize ; if ( estimatedToExceed ) throw new PayloadMaxSizeProbablyExceededException ( maximumPayloadSize , estimatedPayloadSize ) ; } } catch ( PayloadMaxSizeProbablyExceededException e ) { throw e ; } catch ( Exception e ) { } if ( opt ) object . putOpt ( propertyName , propertyValue ) ; else object . put ( propertyName , propertyValue ) ;
|
public class RepositoryImpl { /** * { @ inheritDoc } */
public Session login ( final Credentials credentials , String workspaceName ) throws LoginException , NoSuchWorkspaceException , RepositoryException { } }
|
// Authenticates inside a privileged action: with the given credentials when
// present, otherwise via the no-argument authenticate(). A thrown
// PrivilegedActionException is unwrapped — LoginException and RuntimeException
// causes are rethrown as-is, anything else is wrapped in a RuntimeException.
// An OFFLINE repository only produces a warning; login still proceeds against
// the requested workspace via internalLogin.
if ( getState ( ) == OFFLINE ) { LOG . warn ( "Repository " + getName ( ) + " is OFFLINE." ) ; } ConversationState state ; PrivilegedExceptionAction < ConversationState > action = new PrivilegedExceptionAction < ConversationState > ( ) { public ConversationState run ( ) throws Exception { if ( credentials != null ) { return authenticationPolicy . authenticate ( credentials ) ; } else { return authenticationPolicy . authenticate ( ) ; } } } ; try { state = SecurityHelper . doPrivilegedExceptionAction ( action ) ; } catch ( PrivilegedActionException pae ) { Throwable cause = pae . getCause ( ) ; if ( cause instanceof LoginException ) { throw ( LoginException ) cause ; } else if ( cause instanceof RuntimeException ) { throw ( RuntimeException ) cause ; } else { throw new RuntimeException ( cause ) ; } } return internalLogin ( state , workspaceName ) ;
|
public class BaseAverager { /** * / * ( non - Javadoc )
* @ see Averager # addElement ( java . util . Map , java . util . Map ) */
@ SuppressWarnings ( "unchecked" ) @ Override public void addElement ( Map < String , Object > e , Map < String , AggregatorFactory > a ) { } }
|
Object metric = e . get ( fieldName ) ; I finalMetric ; if ( a . containsKey ( fieldName ) ) { AggregatorFactory af = a . get ( fieldName ) ; finalMetric = metric != null ? ( I ) af . finalizeComputation ( metric ) : null ; } else { finalMetric = ( I ) metric ; } buckets [ index ++ ] = finalMetric ; index %= numBuckets ;
|
public class KTypeHashSet { /** * Ensure this container can hold at least the
* given number of elements without resizing its buffers .
* @ param expectedElements The total number of elements , inclusive . */
@ Override public void ensureCapacity ( int expectedElements ) { } }
|
if ( expectedElements > resizeAt || keys == null ) { final KType [ ] prevKeys = Intrinsics . < KType [ ] > cast ( this . keys ) ; allocateBuffers ( minBufferSize ( expectedElements , loadFactor ) ) ; if ( prevKeys != null && ! isEmpty ( ) ) { rehash ( prevKeys ) ; } }
|
public class AbstractWisdomSourceWatcherMojo { /** * Check if we can create a model from the given source .
* @ param file { @ link File } required to be processed by this plugin .
* @ return < code > true < / code > if the < code > file < / code > implements
* { @ link org . wisdom . api . Controller } , < code > false < / code > otherwise . */
public boolean accept ( File file ) { } }
|
if ( ! WatcherUtils . isInDirectory ( file , javaSourceDir ) || ! WatcherUtils . hasExtension ( file , "java" ) ) { return false ; } // If the file has been deleted by may have been a controller , return true .
// The cleanup will be applied .
if ( ! file . isFile ( ) ) { return true ; } // Parse the Java File and check if it ' s a wisdom Controller
try { final CompilationUnit parse = JavaParser . parse ( file ) ; // The visitor return a Boolean object , potentially null .
final Boolean accept = parse . accept ( CLASS_VISITOR , null ) ; return accept != null && accept ; } catch ( Exception e ) { getLog ( ) . error ( "Cannot parse " + file . getAbsolutePath ( ) , e ) ; return false ; }
|
public class DateUtils { /** * Convert an Object to a Timestamp , without an Exception */
public static java . sql . Timestamp getTimestamp ( Object value ) { } }
|
try { return toTimestamp ( value ) ; } catch ( ParseException pe ) { pe . printStackTrace ( ) ; return null ; }
|
public class PKLogResource { /** * POST request to webServiceURL / version / log */
@ Post ( "json" ) public final Representation postLogMessage ( final Representation entity ) { } }
|
try { LOGGER . debug ( "postLogMessage: Log {}" , entity . getText ( ) ) ; } catch ( IOException e ) { // TODO Auto - generated catch block
e . printStackTrace ( ) ; } return handleLogRequest ( entity ) ;
|
public class RoadNetworkLayerConstants { /** * Set if the internal data structure used to store the road network
* may be drawn on the displayers .
* @ param draw is < code > true < / code > if the internal data structures may be
* drawn on the displayers , < code > false < / code > if the internal data structures
* may not be drawn on the displayers , < code > null < / code > to restore
* the default value given by { @ link # DEFAULT _ ROAD _ INTERN _ DRAWING } . */
public static void setPreferredRoadInternDrawing ( Boolean draw ) { } }
|
final Preferences prefs = Preferences . userNodeForPackage ( RoadNetworkLayerConstants . class ) ; if ( prefs != null ) { if ( draw == null ) { prefs . remove ( "ROAD_INTERN_DRAWING" ) ; // $ NON - NLS - 1 $
} else { prefs . putBoolean ( "ROAD_INTERN_DRAWING" , draw ) ; // $ NON - NLS - 1 $
} try { prefs . flush ( ) ; } catch ( BackingStoreException exception ) { } }
|
public class SeaGlassStyle { /** * Initializes the given < code > Values < / code > object with the defaults
* contained in the given TreeMap .
* @ param v The Values object to be initialized
* @ param myDefaults a map of UIDefaults to use in initializing the Values .
* This map must contain only keys associated with this
* Style . */
private void init ( Values v , TreeMap < String , Object > myDefaults ) { } }
|
// a list of the different types of states used by this style . This
// list may contain only " standard " states ( those defined by Synth ) ,
// or it may contain custom states , or it may contain only " standard "
// states but list them in a non - standard order .
List < State > states = new ArrayList < State > ( ) ; // a map of state name to code
Map < String , Integer > stateCodes = new HashMap < String , Integer > ( ) ; // This is a list of runtime " state " context objects . These contain
// the values associated with each state .
List < RuntimeState > runtimeStates = new ArrayList < RuntimeState > ( ) ; // determine whether there are any custom states , or custom state
// order . If so , then read all those custom states and define the
// " values " stateTypes to be a non - null array .
// Otherwise , let the " values " stateTypes be null to indicate that
// there are no custom states or custom state ordering
String statesString = ( String ) myDefaults . get ( prefix + ".States" ) ; if ( statesString != null ) { String [ ] s = statesString . split ( "," ) ; for ( int i = 0 ; i < s . length ; i ++ ) { s [ i ] = s [ i ] . trim ( ) ; if ( ! State . isStandardStateName ( s [ i ] ) ) { // this is a non - standard state name , so look for the
// custom state associated with it
String stateName = prefix + "." + s [ i ] ; State customState = ( State ) myDefaults . get ( stateName ) ; if ( customState != null ) { states . add ( customState ) ; } } else { states . add ( State . getStandardState ( s [ i ] ) ) ; } } // if there were any states defined , then set the stateTypes array
// to be non - null . Otherwise , leave it null ( meaning , use the
// standard synth states ) .
if ( states . size ( ) > 0 ) { v . stateTypes = states . toArray ( new State [ states . size ( ) ] ) ; } // assign codes for each of the state types
int code = 1 ; for ( State state : states ) { stateCodes . put ( state . getName ( ) , code ) ; code <<= 1 ; } } else { // since there were no custom states defined , setup the list of
// standard synth states . Note that the " v . stateTypes " is not
// being set here , indicating that at runtime the state selection
// routines should use standard synth states instead of custom
// states . I do need to popuplate this temp list now though , so that
// the remainder of this method will function as expected .
states . add ( State . Enabled ) ; states . add ( State . MouseOver ) ; states . add ( State . Pressed ) ; states . add ( State . Disabled ) ; states . add ( State . Focused ) ; states . add ( State . Selected ) ; states . add ( State . Default ) ; // assign codes for the states
stateCodes . put ( "Enabled" , ENABLED ) ; stateCodes . put ( "MouseOver" , MOUSE_OVER ) ; stateCodes . put ( "Pressed" , PRESSED ) ; stateCodes . put ( "Disabled" , DISABLED ) ; stateCodes . put ( "Focused" , FOCUSED ) ; stateCodes . put ( "Selected" , SELECTED ) ; stateCodes . put ( "Default" , DEFAULT ) ; } // Now iterate over all the keys in the defaults table
for ( String key : myDefaults . keySet ( ) ) { // The key is something like JButton . Enabled . backgroundPainter ,
// or JButton . States , or JButton . background .
// Remove the " JButton . " portion of the key
String temp = key . substring ( prefix . length ( ) ) ; // if there is a " or : then we skip it because it is a subregion
// of some kind
if ( temp . indexOf ( '"' ) != - 1 || temp . indexOf ( ':' ) != - 1 ) continue ; // remove the separator
temp = temp . substring ( 1 ) ; // At this point , temp may be any of the following :
// background
// [ Enabled ] . background
// [ Enabled + MouseOver ] . background
// property . foo
// parse out the states and the property
String stateString = null ; String property = null ; int bracketIndex = temp . indexOf ( ']' ) ; if ( bracketIndex < 0 ) { // there is not a state string , so property = temp
property = temp ; } else { stateString = temp . substring ( 0 , bracketIndex ) ; property = temp . substring ( bracketIndex + 2 ) ; } // now that I have the state ( if any ) and the property , get the
// value for this property and install it where it belongs
if ( stateString == null ) { // there was no state , just a property . Check for the custom
// " contentMargins " property ( which is handled specially by
// Synth / SeaGlass ) . Also check for the property being " States " ,
// in which case it is not a real property and should be
// ignored .
// otherwise , assume it is a property and install it on the
// values object
if ( "contentMargins" . equals ( property ) ) { v . contentMargins = ( Insets ) myDefaults . get ( key ) ; } else if ( "States" . equals ( property ) ) { // ignore
} else { v . defaults . put ( property , myDefaults . get ( key ) ) ; } } else { // it is possible that the developer has a malformed UIDefaults
// entry , such that something was specified in the place of
// the State portion of the key but it wasn ' t a state . In this
// case , skip will be set to true
boolean skip = false ; // this variable keeps track of the int value associated with
// the state . See SynthState for details .
int componentState = 0 ; // Multiple states may be specified in the string , such as
// Enabled + MouseOver
String [ ] stateParts = stateString . split ( "\\+" ) ; // For each state , we need to find the State object associated
// with it , or skip it if it cannot be found .
for ( String s : stateParts ) { if ( stateCodes . containsKey ( s ) ) { componentState |= stateCodes . get ( s ) ; } else { // Was not a state . Maybe it was a subregion or
// something
// skip it .
skip = true ; break ; } } if ( skip ) continue ; // find the RuntimeState for this State
RuntimeState rs = null ; for ( RuntimeState s : runtimeStates ) { if ( s . state == componentState ) { rs = s ; break ; } } // couldn ' t find the runtime state , so create a new one
if ( rs == null ) { rs = new RuntimeState ( componentState , stateString ) ; runtimeStates . add ( rs ) ; } // check for a couple special properties , such as for the
// painters . If these are found , then set the specially on
// the runtime state . Else , it is just a normal property ,
// so put it in the UIDefaults associated with that runtime
// state
if ( "backgroundPainter" . equals ( property ) ) { rs . backgroundPainter = getPainter ( myDefaults , key ) ; } else if ( "foregroundPainter" . equals ( property ) ) { rs . foregroundPainter = getPainter ( myDefaults , key ) ; } else if ( "borderPainter" . equals ( property ) ) { rs . borderPainter = getPainter ( myDefaults , key ) ; } else { rs . defaults . put ( property , myDefaults . get ( key ) ) ; } } } // now that I ' ve collected all the runtime states , I ' ll sort them based
// on their integer " state " ( see SynthState for how this works ) .
Collections . sort ( runtimeStates , STATE_COMPARATOR ) ; // finally , set the array of runtime states on the values object
v . states = runtimeStates . toArray ( new RuntimeState [ runtimeStates . size ( ) ] ) ;
|
public class BitfinexCurrencyPair {
    /**
     * Registers a currency pair for use within the library.
     *
     * @ param currency currency ( from )
     * @ param profitCurrency currency ( to )
     * @ param minimalOrderSize minimal order size
     * @ return the newly registered { @ link BitfinexCurrencyPair } instance
     * @ throws IllegalArgumentException if the pair was already registered */
    public static BitfinexCurrencyPair register ( final String currency , final String profitCurrency , final double minimalOrderSize ) { } }
|
// Build the lookup key and attempt an atomic first-time registration.
final String cacheKey = buildCacheKey ( currency , profitCurrency ) ;
final BitfinexCurrencyPair candidate = new BitfinexCurrencyPair ( currency , profitCurrency , minimalOrderSize ) ;
// putIfAbsent returns the previously mapped value, i.e. non-null means a duplicate.
if ( instances . putIfAbsent ( cacheKey , candidate ) != null ) {
    throw new IllegalArgumentException ( "The currency " + cacheKey + " is already known" ) ;
}
return candidate ;
|
public class AtomPlacer3D {
    /**
     * Takes the given Z-Matrix coordinates and converts them to cartesian coordinates.
     * The first atom ends up in the origin, the second one on the x axis, and the third
     * one in the XY plane. The rest is added by applying the Z-matrix distances, angles
     * and dihedrals. Coordinates are assigned directly to the atoms.
     *
     * @ param molecule the molecule to be placed in 3D
     * @ param flagBranched marks branched chain
     * author : egonw , cho */
    public void zmatrixChainToCartesian ( IAtomContainer molecule , boolean flagBranched ) { } }
|
Point3d result = null ;
// Iterate over the internal Z-matrix tables (distances / angles / dihedrals are
// parallel arrays indexed by atom position in the chain).
for ( int index = 0 ; index < distances . length ; index ++ ) {
    if ( index == 0 ) {
        // First atom: origin.
        result = new Point3d ( 0d , 0d , 0d ) ;
    } else if ( index == 1 ) {
        // Second atom: on the positive x axis at its bond distance.
        result = new Point3d ( distances [ 1 ] , 0d , 0d ) ;
    } else if ( index == 2 ) {
        // Third atom: in the XY plane, positioned by bond distance and bond angle
        // (angles are stored in degrees, hence the /180*PI conversion).
        result = new Point3d ( - Math . cos ( ( angles [ 2 ] / 180 ) * Math . PI ) * distances [ 2 ] + distances [ 1 ] ,
                Math . sin ( ( angles [ 2 ] / 180 ) * Math . PI ) * distances [ 2 ] , 0d ) ;
    } else {
        // General case: build a local frame from the three reference atoms and
        // apply distance, angle and dihedral.
        Vector3d cd = new Vector3d ( ) ;
        cd . sub ( molecule . getAtom ( thirdAtoms [ index ] ) . getPoint3d ( ) , molecule . getAtom ( secondAtoms [ index ] ) . getPoint3d ( ) ) ;
        Vector3d bc = new Vector3d ( ) ;
        // NOTE(review): bc uses firstAtoms[index - 3] while the final placement uses
        // firstAtoms[index - 1] -- presumably intentional indexing into the chain
        // tables, but worth confirming against the Z-matrix construction code.
        bc . sub ( molecule . getAtom ( secondAtoms [ index ] ) . getPoint3d ( ) , molecule . getAtom ( firstAtoms [ index - 3 ] ) . getPoint3d ( ) ) ;
        // Normal of the plane spanned by the two reference bonds.
        Vector3d n1 = new Vector3d ( ) ;
        n1 . cross ( cd , bc ) ;
        n1 . normalize ( ) ;
        // Rotate the normal around bc by the dihedral (or the fixed branched-chain
        // dihedral for the first branched atom).
        Vector3d n2 = null ;
        if ( index == 3 && flagBranched ) {
            n2 = AtomTetrahedralLigandPlacer3D . rotate ( n1 , bc , DIHEDRAL_BRANCHED_CHAIN ) ;
        } else {
            n2 = AtomTetrahedralLigandPlacer3D . rotate ( n1 , bc , dihedrals [ index ] ) ;
        }
        n2 . normalize ( ) ;
        // Rotate cd by the bond angle to obtain the direction of the new bond.
        Vector3d ba = new Vector3d ( ) ;
        if ( index == 3 && flagBranched ) {
            // Branched chains get an extra rotation around cd.
            ba = AtomTetrahedralLigandPlacer3D . rotate ( cd , n2 , ( - angles [ index ] / 180 ) * Math . PI ) ;
            ba = AtomTetrahedralLigandPlacer3D . rotate ( ba , cd , ( - angles [ index ] / 180 ) * Math . PI ) ;
        } else {
            ba = AtomTetrahedralLigandPlacer3D . rotate ( cd , n2 , ( - angles [ index ] / 180 ) * Math . PI ) ;
        }
        ba . normalize ( ) ;
        // Scale the unit direction by the bond distance and offset from the anchor atom.
        Vector3d ban = new Vector3d ( ba ) ;
        ban . scale ( distances [ index ] ) ;
        result = new Point3d ( ) ;
        result . add ( molecule . getAtom ( firstAtoms [ index - 1 ] ) . getPoint3d ( ) , ban ) ;
    }
    // Only assign the coordinate to atoms that are unplaced, not in a ring,
    // and heavy (non-hydrogen) -- ring atoms are handled elsewhere.
    IAtom atom = molecule . getAtom ( firstAtoms [ index ] ) ;
    if ( ( atom . getPoint3d ( ) == null || ! atom . getFlag ( CDKConstants . ISPLACED ) ) && ! atom . getFlag ( CDKConstants . ISINRING ) && isHeavyAtom ( atom ) ) {
        atom . setPoint3d ( result ) ;
        atom . setFlag ( CDKConstants . ISPLACED , true ) ;
    }
}
|
public class StatusConverter { /** * Returns a { @ link io . grpc . Status . Code } from a { @ link io . opencensus . trace . Status . CanonicalCode } .
* @ param opencensusCanonicalCode the given { @ code io . opencensus . trace . Status . CanonicalCode } .
* @ return a { @ code io . grpc . Status . Code } from a { @ code io . opencensus . trace . Status . CanonicalCode } .
* @ since 0.6 */
public static io . grpc . Status . Code toGrpcCode ( io . opencensus . trace . Status . CanonicalCode opencensusCanonicalCode ) { } }
|
return grpcStatusFromOpencensusCanonicalCode ( opencensusCanonicalCode ) . getCode ( ) ;
|
public class GenericWindowContainer {
    /**
     * Deprecated for conceptual consistency: call { @ code getInvocation ( ) . addModel ( String , Object ) }
     * directly instead. (Original note dated 2010-08-04 said this delegate was
     * scheduled for removal after the 2010 National Day holiday.) */
    @ Deprecated @ Override public void addModel ( String name , Object value ) { } }
|
// Pure delegation: the invocation object is the canonical model holder.
getInvocation ( ) . addModel ( name , value ) ;
|
public class LabAccountsInner {
    /**
     * Creates a lab in a lab account, blocking until the service call completes.
     *
     * @ param resourceGroupName The name of the resource group .
     * @ param labAccountName The name of the lab Account .
     * @ param createLabProperties Properties for creating a managed lab and a default environment setting
     * @ throws IllegalArgumentException thrown if parameters fail the validation
     * @ throws CloudException thrown if the request is rejected by server
     * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */
    public void createLab ( String resourceGroupName , String labAccountName , CreateLabProperties createLabProperties ) { } }
|
// Synchronous wrapper over the async variant: block on the single emitted
// response; the body is ignored because this operation returns void.
createLabWithServiceResponseAsync ( resourceGroupName , labAccountName , createLabProperties )
    . toBlocking ( )
    . single ( )
    . body ( ) ;
|
public class DuckType { /** * Causes object to implement the interfaceToImplement and returns
* an instance of the object implementing interfaceToImplement even
* if interfaceToImplement was not declared in object . getClass ( ) ' s
* implements declaration .
* This works as long as all methods declared in interfaceToImplement
* are present on object .
* @ param type The Java class of the interface to implement
* @ param object The object to force to implement interfaceToImplement
* @ param < T > a T object .
* @ return a T object . */
public static < T > T implement ( Class < T > type , Object object ) { } }
|
if ( type . isInstance ( object ) ) return type . cast ( object ) ; return type . cast ( Proxy . newProxyInstance ( type . getClassLoader ( ) , new Class [ ] { type } , new DuckType ( object ) ) ) ;
|
public class ResolverConfig {
    /**
     * Parses the output of winipcfg or ipconfig, extracting the host name,
     * DNS suffixes and DNS server addresses, then hands the collected lists
     * to { @ code configureFromLists } . Localized section labels are read
     * from the windows.DNSServer resource bundle so non-English ipconfig
     * output can also be parsed. Any I/O error aborts parsing silently
     * (best-effort configuration). */
    private void findWin ( InputStream in ) { } }
|
// Resolve the localized labels that mark the sections of ipconfig output.
String packageName = ResolverConfig . class . getPackage ( ) . getName ( ) ;
String resPackageName = packageName + ".windows.DNSServer" ;
ResourceBundle res = ResourceBundle . getBundle ( resPackageName ) ;
String host_name = res . getString ( "host_name" ) ;
String primary_dns_suffix = res . getString ( "primary_dns_suffix" ) ;
String dns_suffix = res . getString ( "dns_suffix" ) ;
String dns_servers = res . getString ( "dns_servers" ) ;
BufferedReader br = new BufferedReader ( new InputStreamReader ( in ) ) ;
try {
    List lserver = new ArrayList ( ) ;
    List lsearch = new ArrayList ( ) ;
    String line = null ;
    // ipconfig lists servers/suffixes as a labeled first line followed by
    // unlabeled continuation lines; these flags track that continuation state.
    boolean readingServers = false ;
    boolean readingSearches = false ;
    while ( ( line = br . readLine ( ) ) != null ) {
        StringTokenizer st = new StringTokenizer ( line ) ;
        if ( ! st . hasMoreTokens ( ) ) {
            // Blank line ends any continuation block.
            readingServers = false ;
            readingSearches = false ;
            continue ;
        }
        String s = st . nextToken ( ) ;
        if ( line . indexOf ( ":" ) != - 1 ) {
            // A new "Label . . . : value" line also ends continuation blocks.
            readingServers = false ;
            readingSearches = false ;
        }
        if ( line . indexOf ( host_name ) != - 1 ) {
            // Last token on the host-name line is the host name itself.
            while ( st . hasMoreTokens ( ) ) s = st . nextToken ( ) ;
            Name name ;
            try {
                name = Name . fromString ( s , null ) ;
            } catch ( TextParseException e ) {
                continue ;
            }
            // Single-label host names carry no usable search domain.
            if ( name . labels ( ) == 1 ) continue ;
            addSearch ( s , lsearch ) ;
        } else if ( line . indexOf ( primary_dns_suffix ) != - 1 ) {
            while ( st . hasMoreTokens ( ) ) s = st . nextToken ( ) ;
            // A bare ":" means the suffix field is empty.
            if ( s . equals ( ":" ) ) continue ;
            addSearch ( s , lsearch ) ;
            readingSearches = true ;
        } else if ( readingSearches || line . indexOf ( dns_suffix ) != - 1 ) {
            while ( st . hasMoreTokens ( ) ) s = st . nextToken ( ) ;
            if ( s . equals ( ":" ) ) continue ;
            addSearch ( s , lsearch ) ;
            readingSearches = true ;
        } else if ( readingServers || line . indexOf ( dns_servers ) != - 1 ) {
            while ( st . hasMoreTokens ( ) ) s = st . nextToken ( ) ;
            if ( s . equals ( ":" ) ) continue ;
            addServer ( s , lserver ) ;
            readingServers = true ;
        }
    }
    configureFromLists ( lserver , lsearch ) ;
} catch ( IOException e ) {
    // Best-effort: an unreadable stream simply leaves the config unchanged.
} finally {
    try {
        br . close ( ) ;
    } catch ( IOException e ) {
        // Ignore close failures; nothing useful can be done here.
    }
}
return ;
|
public class SQSSession {
    /**
     * Queue browsing is not supported by the SQS JMS client.
     *
     * @ throws JMSException always, with the unsupported-method message */
    @ Override public QueueBrowser createBrowser ( Queue queue , String messageSelector ) throws JMSException { } }
|
// Unconditionally reject: SQS has no concept of browsing queued messages.
throw new JMSException ( SQSMessagingClientConstants . UNSUPPORTED_METHOD ) ;
|
public class CmsDriverManager {
    /**
     * Deletes an entry in the published resource table.<p>
     *
     * @ param dbc the current database context
     * @ param resourceName The name of the resource to be deleted in the static export
     * @ param linkType the type of resource deleted ( 0 = non - parameter , 1 = parameter )
     * @ param linkParameter the parameters of the resource
     * @ throws CmsException if something goes wrong */
    public void deleteStaticExportPublishedResource ( CmsDbContext dbc , String resourceName , int linkType , String linkParameter ) throws CmsException { } }
|
// Forward straight to the project driver for the given database context.
getProjectDriver ( dbc ) . deleteStaticExportPublishedResource ( dbc , resourceName , linkType , linkParameter ) ;
|
public class PMTags {
    /**
     * Builds a jpm url based on the given url, defaulting the extra flag to false.
     *
     * @ param session Session user . Cannot be null
     * @ param url Url to be built , < b > without < / b > context path
     * @ return ready to use url */
    public static String url ( PMSession session , String url ) { } }
|
// Convenience overload: delegate with the boolean option disabled.
return url ( session , url , false ) ;
|
public class BProgramJsProxy {
    /**
     * Where the actual Behavioral Programming synchronization point is done.
     * Converts the JavaScript sync object into a { @ code SyncStatement } ,
     * wires up request / waitFor / block / interrupt sets, warns about
     * request-block collisions, and captures the b-thread state.
     *
     * @ param jsRWB The JavaScript object { @ code { request : . . . waitFor : . . . } }
     * @ param hot { @ code True } if this should be a " hot " synchronization point .
     * @ param data Optional extra data the synchronizing b - thread may want to add . */
    @ Override void synchronizationPoint ( NativeObject jsRWB , Boolean hot , Object data ) { } }
|
// Marshal the Rhino object into a plain Java map of sync-statement fields.
Map < String , Object > jRWB = ( Map ) Context . jsToJava ( jsRWB , Map . class ) ;
SyncStatement stmt = SyncStatement . make ( ) ;
// hot may be null when the caller did not specify hotness.
if ( hot != null ) {
    stmt = stmt . hot ( hot ) ;
}
// "request" may be a single event or a JS array of events.
Object req = jRWB . get ( "request" ) ;
if ( req != null ) {
    if ( req instanceof BEvent ) {
        stmt = stmt . request ( ( BEvent ) req ) ;
    } else if ( req instanceof NativeArray ) {
        NativeArray arr = ( NativeArray ) req ;
        stmt = stmt . request ( Arrays . asList ( arr . getIndexIds ( ) ) . stream ( )
                . map ( i -> ( BEvent ) arr . get ( i ) )
                . collect ( toList ( ) ) ) ;
    }
}
// waitFor / block / interrupt accept anything convertToEventSet understands.
EventSet waitForSet = convertToEventSet ( jRWB . get ( "waitFor" ) ) ;
EventSet blockSet = convertToEventSet ( jRWB . get ( "block" ) ) ;
EventSet interruptSet = convertToEventSet ( jRWB . get ( "interrupt" ) ) ;
stmt = stmt . waitFor ( waitForSet ) . block ( blockSet ) . interrupt ( interruptSet ) . data ( data ) ;
// Requesting an event that is simultaneously blocked can deadlock the b-thread;
// surface that as a warning rather than failing.
boolean hasCollision = stmt . getRequest ( ) . stream ( ) . anyMatch ( blockSet :: contains ) ;
if ( hasCollision ) {
    System . err . println ( "Warning: B-thread is blocking an event it is also requesting, this may lead to a deadlock." ) ;
}
captureBThreadState ( stmt ) ;
|
public class BreakPanel { /** * Sets the location of the break buttons .
* If the location is already set and the main tool bar visibility is the same , no change is done .
* @ param location the location to set */
void setButtonsLocation ( int location ) { } }
|
if ( currentButtonsLocation == location ) { mainBreakButtons . setVisible ( location == 0 && isMainToolBarHidden ( ) ) ; return ; } currentButtonsLocation = location ; switch ( location ) { case 0 : requestBreakButtons . setVisible ( false ) ; responseBreakButtons . setVisible ( false ) ; setToolbarButtonsVisible ( true ) ; // If the user decided to disable the main toolbar , the break
// buttons have to be force to be displayed in the break panel
mainBreakButtons . setVisible ( isMainToolBarHidden ( ) ) ; break ; case 1 : case 2 : requestBreakButtons . setVisible ( true ) ; responseBreakButtons . setVisible ( true ) ; setToolbarButtonsVisible ( location == 2 ) ; mainBreakButtons . setVisible ( false ) ; break ; default : setToolbarButtonsVisible ( true ) ; }
|
public class VpnTunnelClient { /** * Deletes the specified VpnTunnel resource .
* < p > Sample code :
* < pre > < code >
* try ( VpnTunnelClient vpnTunnelClient = VpnTunnelClient . create ( ) ) {
* ProjectRegionVpnTunnelName vpnTunnel = ProjectRegionVpnTunnelName . of ( " [ PROJECT ] " , " [ REGION ] " , " [ VPN _ TUNNEL ] " ) ;
* Operation response = vpnTunnelClient . deleteVpnTunnel ( vpnTunnel ) ;
* < / code > < / pre >
* @ param vpnTunnel Name of the VpnTunnel resource to delete .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final Operation deleteVpnTunnel ( ProjectRegionVpnTunnelName vpnTunnel ) { } }
|
DeleteVpnTunnelHttpRequest request = DeleteVpnTunnelHttpRequest . newBuilder ( ) . setVpnTunnel ( vpnTunnel == null ? null : vpnTunnel . toString ( ) ) . build ( ) ; return deleteVpnTunnel ( request ) ;
|
public class FreeVarCollector { /** * If tree refers to a class instance creation expression
* add all free variables of the freshly created class . */
public void visitNewClass ( JCNewClass tree ) { } }
|
ClassSymbol c = ( ClassSymbol ) tree . constructor . owner ; if ( tree . encl == null && c . hasOuterInstance ( ) && outerThisStack . head != null ) visitSymbol ( outerThisStack . head ) ; super . visitNewClass ( tree ) ;
|
public class UpdateIntegrationResponseRequest {
    /**
     * Fluent setter for the response-parameter map passed from the backend
     * to the method response. Keys must match the pattern
     * { @ code method . response . header . { name } } ; mapped non-static values must match
     * { @ code integration . response . header . { name } } or
     * { @ code integration . response . body . { JSON - expression } } (JSON expression
     * without the { @ code $ } prefix); static values are enclosed in single quotes.
     *
     * @ param responseParameters the key-value map of response parameters
     * @ return this request, for call chaining */
    public UpdateIntegrationResponseRequest withResponseParameters ( java . util . Map < String , String > responseParameters ) { } }
|
// Store via the plain setter, then return this for the fluent style.
setResponseParameters ( responseParameters ) ;
return this ;
|
public class LayerUtil {
    /**
     * Orders a list of MapElements by priority and drops those that overlap
     * an already-accepted element. Useful for an early elimination of
     * elements that would never be drawn because they overlap.
     *
     * @ param input list of MapElements (sorted in place, highest priority first)
     * @ return collision-free, ordered list, a subset of the input */
    public static List < MapElementContainer > collisionFreeOrdered ( List < MapElementContainer > input ) { } }
|
// Highest-priority elements first so they win any overlap contest.
Collections . sort ( input , Collections . reverseOrder ( ) ) ;
List < MapElementContainer > output = new LinkedList < MapElementContainer > ( ) ;
for ( MapElementContainer candidate : input ) {
    // Accept the candidate only if no already-accepted element clashes with it.
    boolean clashes = false ;
    for ( MapElementContainer accepted : output ) {
        if ( accepted . clashesWith ( candidate ) ) {
            clashes = true ;
            break ;
        }
    }
    if ( ! clashes ) {
        output . add ( candidate ) ;
    }
}
return output ;
|
public class HelloWorldSessionListener {
    /**
     * Fires whenever a new session is created: bumps the session counter and
     * records the session in the shared map keyed by its id.
     * NOTE(review): counter++ is not atomic; if sessions can be created
     * concurrently this count may drift -- confirm whether that matters here. */
    public void sessionCreated ( final HttpSessionEvent event ) { } }
|
counter ++ ;
final HttpSession session = event . getSession ( ) ;
SESSIONS . put ( session . getId ( ) , session ) ;
|
public class GlobusGSSManagerImpl {
    /**
     * Acquires GSI GSS credentials. First, it tries to find the credentials
     * in the private credential set of the current JAAS Subject. If the
     * Subject is not set or credentials are not found in the Subject, it
     * tries to get a default user credential (usually a user proxy file).
     *
     * @ param lifetime Only lifetime set to
     * { @ link GSSCredential # DEFAULT _ LIFETIME GSSCredential . DEFAULT _ LIFETIME } is allowed .
     * @ see org . globus . gsi . X509Credential # getDefaultCredential ( ) */
    public GSSCredential createCredential ( GSSName name , int lifetime , Oid mech , int usage ) throws GSSException { } }
|
checkMechanism ( mech ) ;
// A concrete name is only accepted when it is the anonymous name; any other
// named credential acquisition is unsupported.
if ( name != null ) {
    if ( name . isAnonymous ( ) ) {
        return new GlobusGSSCredentialImpl ( ) ;
    } else {
        throw new GSSException ( GSSException . UNAVAILABLE ) ;
    }
}
X509Credential cred = null ;
// Prefer a credential carried by the current JAAS Subject, if any.
Subject subject = JaasSubject . getCurrentSubject ( ) ;
if ( subject != null ) {
    logger . debug ( "Getting credential from context" ) ;
    Set gssCreds = subject . getPrivateCredentials ( GlobusGSSCredentialImpl . class ) ;
    if ( gssCreds != null ) {
        // Only the first credential in the set is used.
        Iterator iter = gssCreds . iterator ( ) ;
        if ( iter . hasNext ( ) ) {
            GlobusGSSCredentialImpl credImpl = ( GlobusGSSCredentialImpl ) iter . next ( ) ;
            cred = credImpl . getX509Credential ( ) ;
        }
    }
}
// Only DEFAULT_LIFETIME is supported; reject indefinite or positive lifetimes.
if ( lifetime == GSSCredential . INDEFINITE_LIFETIME || lifetime > 0 ) {
    // lifetime not supported
    throw new GlobusGSSException ( GSSException . FAILURE , GlobusGSSException . BAD_ARGUMENT , "badLifetime01" ) ;
}
if ( cred == null ) {
    // Fall back to the default credential (typically the user proxy file).
    logger . debug ( "Getting default credential" ) ;
    try {
        cred = X509Credential . getDefaultCredential ( ) ;
    } catch ( CredentialException e ) {
        throw new GlobusGSSException ( GSSException . DEFECTIVE_CREDENTIAL , e ) ;
    } catch ( Exception e ) {
        throw new GlobusGSSException ( GSSException . DEFECTIVE_CREDENTIAL , e ) ;
    }
    return getDefaultCredential ( cred , usage ) ;
} else {
    return new GlobusGSSCredentialImpl ( cred , usage ) ;
}
|
public class CPMeasurementUnitPersistenceImpl {
    /**
     * Clears the cache for all cp measurement units.
     * Both the { @ link EntityCache } and the { @ link FinderCache }
     * (entity, paginated-list and unpaginated-list finders) are cleared. */
    @ Override public void clearCache ( ) { } }
|
// Drop the cached entities first, then all three classes of finder results.
entityCache . clearCache ( CPMeasurementUnitImpl . class ) ;
finderCache . clearCache ( FINDER_CLASS_NAME_ENTITY ) ;
finderCache . clearCache ( FINDER_CLASS_NAME_LIST_WITH_PAGINATION ) ;
finderCache . clearCache ( FINDER_CLASS_NAME_LIST_WITHOUT_PAGINATION ) ;
|
public class AWSStepFunctionsClient { /** * Starts a state machine execution .
* < note >
* < code > StartExecution < / code > is idempotent . If < code > StartExecution < / code > is called with the same name and input
* as a running execution , the call will succeed and return the same response as the original request . If the
* execution is closed or if the input is different , it will return a 400 < code > ExecutionAlreadyExists < / code > error .
* Names can be reused after 90 days .
* < / note >
* @ param startExecutionRequest
* @ return Result of the StartExecution operation returned by the service .
* @ throws ExecutionLimitExceededException
* The maximum number of running executions has been reached . Running executions must end or be stopped
* before a new execution can be started .
* @ throws ExecutionAlreadyExistsException
* The execution has the same < code > name < / code > as another execution ( but a different < code > input < / code >
* ) . < / p > < note >
* Executions with the same < code > name < / code > and < code > input < / code > are considered idempotent .
* @ throws InvalidArnException
* The provided Amazon Resource Name ( ARN ) is invalid .
* @ throws InvalidExecutionInputException
* The provided JSON input data is invalid .
* @ throws InvalidNameException
* The provided name is invalid .
* @ throws StateMachineDoesNotExistException
* The specified state machine does not exist .
* @ throws StateMachineDeletingException
* The specified state machine is being deleted .
* @ sample AWSStepFunctions . StartExecution
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / states - 2016-11-23 / StartExecution " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public StartExecutionResult startExecution ( StartExecutionRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeStartExecution ( request ) ;
|
public class ClassUseWriter {
    /**
     * Adds a row describing a package that uses the given class: a first
     * cell linking to the package and a last cell with its summary comment.
     *
     * @ param pkg the package that uses the given class
     * @ param contentTree the content tree to which the package use information will be added */
    protected void addPackageUse ( PackageDoc pkg , Content contentTree ) throws IOException { } }
|
// First column: hyperlink to the using package.
Content packageLink = getHyperLink ( pkg . name ( ) , new StringContent ( Util . getPackageName ( pkg ) ) ) ;
contentTree . addContent ( HtmlTree . TD ( HtmlStyle . colFirst , packageLink ) ) ;
// Last column: the package's summary comment.
HtmlTree summaryCell = new HtmlTree ( HtmlTag . TD ) ;
summaryCell . addStyle ( HtmlStyle . colLast ) ;
addSummaryComment ( pkg , summaryCell ) ;
contentTree . addContent ( summaryCell ) ;
|
public class ST_BoundingCircleCenter {
    /**
     * Computes the center of the minimum bounding circle of a geometry.
     *
     * @ param geometry Any geometry; may be null or empty
     * @ return minimum bounding circle center point, or null for null/empty input */
    public static Point getCircumCenter ( Geometry geometry ) { } }
|
// Null or empty geometries have no bounding circle.
if ( geometry == null || geometry . getNumPoints ( ) == 0 ) {
    return null ;
}
MinimumBoundingCircle boundingCircle = new MinimumBoundingCircle ( geometry ) ;
return geometry . getFactory ( ) . createPoint ( boundingCircle . getCentre ( ) ) ;
|
public class Ifc4PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Lazily resolves the EClass for IfcReinforcementDefinitionProperties
     * from the registered Ifc4 package (classifier index 510).
     * <!-- end-user-doc -->
     * @ generated */
    @ Override public EClass getIfcReinforcementDefinitionProperties ( ) { } }
|
// Resolve once from the package registry; cached for later calls.
if ( ifcReinforcementDefinitionPropertiesEClass == null ) {
    EPackage ifc4Package = EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) ;
    ifcReinforcementDefinitionPropertiesEClass = ( EClass ) ifc4Package . getEClassifiers ( ) . get ( 510 ) ;
}
return ifcReinforcementDefinitionPropertiesEClass ;
|
public class AdaptiveGrid {
    /**
     * Returns the average number of solutions in the occupied hypercubes,
     * or 0.0 when no hypercube is occupied. */
    public double getAverageOccupation ( ) { } }
|
// Refresh the occupied-hypercube bookkeeping before averaging.
calculateOccupied ( ) ;
if ( occupiedHypercubes ( ) == 0 ) {
    return 0.0 ;
}
double total = 0.0 ;
for ( int hypercubeIndex : occupied ) {
    total += hypercubes [ hypercubeIndex ] ;
}
return total / occupiedHypercubes ( ) ;
|
public class FilePath { /** * Checks if the remote path is Unix . */
boolean isUnix ( ) { } }
|
// if the path represents a local path , there ' no need to guess .
if ( ! isRemote ( ) ) return File . pathSeparatorChar != ';' ; // note that we can ' t use the usual File . pathSeparator and etc . , as the OS of
// the machine where this code runs and the OS that this FilePath refers to may be different .
// Windows absolute path is ' X : \ . . . ' , so this is usually a good indication of Windows path
if ( remote . length ( ) > 3 && remote . charAt ( 1 ) == ':' && remote . charAt ( 2 ) == '\\' ) return false ; // Windows can handle ' / ' as a path separator but Unix can ' t ,
// so err on Unix side
return ! remote . contains ( "\\" ) ;
|
public class AmazonLexModelBuildingClient { /** * Returns a list of all of the channels associated with the specified bot .
* The < code > GetBotChannelAssociations < / code > operation requires permissions for the
* < code > lex : GetBotChannelAssociations < / code > action .
* @ param getBotChannelAssociationsRequest
* @ return Result of the GetBotChannelAssociations operation returned by the service .
* @ throws LimitExceededException
* The request exceeded a limit . Try your request again .
* @ throws InternalFailureException
* An internal Amazon Lex error occurred . Try your request again .
* @ throws BadRequestException
* The request is not well formed . For example , a value is invalid or a required field is missing . Check the
* field values , and try again .
* @ sample AmazonLexModelBuilding . GetBotChannelAssociations
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / lex - models - 2017-04-19 / GetBotChannelAssociations "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public GetBotChannelAssociationsResult getBotChannelAssociations ( GetBotChannelAssociationsRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeGetBotChannelAssociations ( request ) ;
|
public class SimpleDocTreeVisitor {
    /**
     * { @ inheritDoc } This implementation simply forwards to { @ code defaultAction } .
     *
     * @ param node { @ inheritDoc }
     * @ param p { @ inheritDoc }
     * @ return the result of { @ code defaultAction } */
    @ Override public R visitHidden ( HiddenTree node , P p ) { } }
|
// No special handling for @hidden tags: use the visitor's default behavior.
return defaultAction ( node , p ) ;
|
public class ChannelFrameworkImpl { /** * Use the criteria to narrow down the provided list of endpoints to
* a proper subset .
* @ param endPointList
* @ param criteria
* @ param getBestOnly
* @ return CFEndPoint [ ] */
private CFEndPoint [ ] commonGetEndPoints ( CFEndPoint [ ] endPointList , CFEndPointCriteria criteria , boolean getBestOnly ) { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . entry ( tc , "commonGetEndPoints" ) ; } if ( null == endPointList || 0 == endPointList . length || null == criteria ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . exit ( tc , "commonGetEndPoints" , null ) ; } return null ; } String chainName = criteria . getChainName ( ) ; List < CFEndPoint > chosenEndPoints = new LinkedList < CFEndPoint > ( ) ; if ( null != chainName ) { for ( int i = 0 ; i < endPointList . length ; i ++ ) { if ( null == endPointList [ i ] ) { continue ; } if ( chainName . equals ( endPointList [ i ] . getName ( ) ) ) { chosenEndPoints . add ( endPointList [ i ] ) ; if ( getBestOnly ) { break ; } } } } else { // Chain name was not provided in criteria .
// Take special steps if the vhost is specified in the criteria .
String vhost = criteria . getVirtualHost ( ) ; if ( null != vhost ) { // If a match is found with an endpoint , then chosenEndPoints should
// only include
// a list of endPoints with matching vhost . If no matches are found ,
// then filter
// out all endPoints that have the unmatching vhost . . . leaving all with
// null vhosts .
// Special Note :
// The case of not finding a match should only happen if no vhost was
// found to match
// the inbound chain ' s host and port that lead to the creation of the
// CFEndPoint . This
// could result from a customer changing the port number on the server ,
// but not
// putting the port into an appropriate virtual host . If precise
// selection of a
// CFEndPoint is needed , then that extra step is required by the
// customer .
List < CFEndPoint > vhostMatchingEndPoints = new ArrayList < CFEndPoint > ( ) ; List < CFEndPoint > vhostNullEndPoints = new ArrayList < CFEndPoint > ( ) ; for ( CFEndPoint tempEndPoint : endPointList ) { if ( tempEndPoint == null ) { continue ; } List < String > vhostList = tempEndPoint . getVirtualHosts ( ) ; if ( vhostList . isEmpty ( ) ) { vhostNullEndPoints . add ( tempEndPoint ) ; } else { for ( String value : vhostList ) { if ( vhost . equalsIgnoreCase ( value ) ) { // Found a match . Add it to the matching array list .
vhostMatchingEndPoints . add ( tempEndPoint ) ; break ; } } } } // Check to see if any matches were found for the vhost .
if ( 0 != vhostMatchingEndPoints . size ( ) ) { // A match was found . The endPointList should only include these
// endPoints .
// Update the array to the correct length . Then fill it with the new
// contents .
endPointList = new CFEndPoint [ vhostMatchingEndPoints . size ( ) ] ; vhostMatchingEndPoints . toArray ( endPointList ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Found matching vhost in " + vhostMatchingEndPoints . size ( ) + " CFEndPoints." ) ; } } else { // No match was found . The endPointList should only include endPoints
// with no vhost .
// Update the array to the correct length . Then fill it with the new
// contents .
endPointList = new CFEndPoint [ vhostNullEndPoints . size ( ) ] ; vhostNullEndPoints . toArray ( endPointList ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "No matching vhost found. New CFEndPoint list size " + vhostNullEndPoints . size ( ) ) ; } } } // Decision may be based on isSSLRequired and isLocal
boolean sslRequired = criteria . isSSLRequired ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Criteria specification if SSL is required: " + sslRequired ) ; } // Decision may be based on a list of required channel factories .
Class < ? > [ ] requiredFactories = criteria . getOptionalChannelFactories ( ) ; // Iterate the endPoints and compare them to the criteria .
for ( CFEndPoint tempEndPoint : endPointList ) { if ( tempEndPoint == null ) { continue ; } // Double check that the channel accessor is not null .
if ( tempEndPoint . getChannelAccessor ( ) == null ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Skipping over end point with null channel accessor, " + tempEndPoint . getName ( ) ) ; } continue ; } // Check for inclusion of the required factories .
if ( requiredFactories != null ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Found required factories to match" ) ; } boolean foundFactory = true ; Class < ? > ocdFactory = null ; List < OutboundChannelDefinition > ocdList = tempEndPoint . getOutboundChannelDefs ( ) ; // Iterate the list of required factories to ensure each is in the end
// point .
for ( Class < ? > requiredFactory : requiredFactories ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "RequiredFactory: " + requiredFactory ) ; } // Note that we haven ' t found a match yet .
foundFactory = false ; // Iterate the list of outbound channel definitions in search for
// the required factory class .
for ( OutboundChannelDefinition def : ocdList ) { ocdFactory = def . getOutboundFactory ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "OCD Factory: " + ocdFactory ) ; } // Try to match the required factory class to this outbound
// channel definition .
if ( requiredFactory . isAssignableFrom ( ocdFactory ) || ocdFactory . isAssignableFrom ( requiredFactory ) ) { // Found a match . This endpoint is good so far .
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Found a match" ) ; } foundFactory = true ; break ; } } // Verify that the required factory was in the endpoint .
if ( ! foundFactory ) { // Did NOT find a match . This endpoint is no good . Break out of
// this loop .
break ; } } if ( ! foundFactory ) { // Did NOT find a match for a required factory . This endpoint is no
// good . Move on to next endpoint .
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Skipping over endpoint with missing required factory." ) ; } continue ; } } // Check that the channel accessor matches the endPoint .
if ( tempEndPoint . getChannelAccessor ( ) . equals ( criteria . getChannelAccessor ( ) ) ) { // Found matching channel accessor . Check if SSL is required in the
// criteria
if ( sslRequired ) { // The criteria requires an SSL enabled endpoint .
if ( tempEndPoint . isSSLEnabled ( ) ) { // Check to see if an endpoint was already found .
if ( chosenEndPoints . size ( ) == 0 ) { // No endpoint found yet . Found a matching endpoint .
chosenEndPoints . add ( tempEndPoint ) ; // Searching can stop if the endpoint is local
if ( tempEndPoint . isLocal ( ) ) { // The matching endpoint is local and getBestOnly . Stop
// search .
if ( getBestOnly ) { break ; } } } else { if ( getBestOnly ) { // Endpoint was already found . We are searching for a better
// local one .
if ( tempEndPoint . isLocal ( ) ) { // This endpoint is local and the former endPoint wasn ' t and
// getBestOnly . Stop search .
chosenEndPoints . add ( tempEndPoint ) ; break ; } } else { chosenEndPoints . add ( tempEndPoint ) ; } } } } else { // The criteria requires a non SSL enabled endpoint .
if ( ! tempEndPoint . isSSLEnabled ( ) ) { // Found a matching non SSL enabled endpoint .
chosenEndPoints . add ( tempEndPoint ) ; // Still need to get a local one if available .
if ( tempEndPoint . isLocal ( ) ) { // Found a matching local endpoint . Stop search .
if ( getBestOnly ) { break ; } } } } // end check sslRequired
} // end check channelAccessor
} // end loop through end points
} // end check chainName
// convert the list to array or null if no match is found .
int rtnSize = chosenEndPoints . size ( ) ; CFEndPoint rtnEPs [ ] = null ; if ( rtnSize > 0 ) { rtnEPs = new CFEndPoint [ rtnSize ] ; chosenEndPoints . toArray ( rtnEPs ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . exit ( tc , "commonGetEndPoints " + rtnSize ) ; } return rtnEPs ;
|
public class InMemoryChunkAccumulator { /** * Accumulate chunks in a map
 * until all chunks for a message have been accumulated.
 * You can check whether all chunks are present with
 * {@link ChunkAccumulator#allPresent(String)}, where the parameter is the id.
 * After all chunks have been accumulated
 * you can call {@link ChunkAccumulator#reassemble(String)},
 * where the id is the id of the chunk.
 * @param chunk the chunk to accumulate */
@ Override public void accumulateChunk ( NDArrayMessageChunk chunk ) { } }
|
// Group incoming chunks by their message id so a partial message can later be
// checked with allPresent(id) and rebuilt with reassemble(id).
String id = chunk.getId();
// computeIfAbsent replaces the original containsKey/get double lookup:
// create the per-id list on first sight of the id, then append in one pass.
chunks.computeIfAbsent(id, key -> new ArrayList<>()).add(chunk);
// Parameterized logging: the message string is only built when debug is enabled.
log.debug("Accumulating chunk for id {}", id);
|
public class ManagementClient { /** * Updates an existing subscription.
 * @param subscriptionDescription - A {@link SubscriptionDescription} object describing the attributes with which the subscription will be updated.
 * @return {@link SubscriptionDescription} of the updated subscription.
 * @throws MessagingEntityNotFoundException - Described entity was not found.
 * @throws IllegalArgumentException - descriptor is null.
 * @throws TimeoutException - The operation times out. The timeout period is initiated through ClientSettings.operationTimeout.
 * @throws AuthorizationFailedException - No sufficient permission to perform this operation. Please check that ClientSettings.tokenProvider has correct details.
 * @throws ServerBusyException - The server is busy. You should wait before you retry the operation.
 * @throws ServiceBusException - An internal error or an unexpected exception occurred.
 * @throws QuotaExceededException - Either the specified size in the description is not supported or the maximum allowed quota has been reached.
 * @throws InterruptedException if the current thread was interrupted */
public SubscriptionDescription updateSubscription ( SubscriptionDescription subscriptionDescription ) throws ServiceBusException , InterruptedException { } }
|
// Synchronous facade over the async client: issue the update, then block until the
// returned future completes. Utils.completeFuture presumably unwraps the future's
// ExecutionException into the declared ServiceBusException / InterruptedException
// contract -- TODO confirm against Utils.completeFuture.
return Utils . completeFuture ( this . asyncClient . updateSubscriptionAsync ( subscriptionDescription ) ) ;
|
public class IPUtil { /** * Retrieves all the IPv4 addresses of the local computer.
 * NOTE(review): contrary to the original comment, the implementation DOES return the
 * loopback address (127.0.0.1) as a fallback when no other IPv4 address is found. */
public static Collection < InterfaceAddress > getAllIPAddresses ( ) throws SocketException { } }
|
// Collect the IPv4 interface addresses of every local network interface.
// The loopback address is remembered separately and used only as a fallback
// when no other IPv4 address exists on the machine.
List<InterfaceAddress> addresses = new ArrayList<>();
InterfaceAddress loopback = null;
// Generic Enumeration replaces the original raw type; the useless
// catch-and-rethrow of SocketException is dropped (the method already declares it).
Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
// getNetworkInterfaces() may return null on platforms with no interfaces.
if (interfaces != null) {
    while (interfaces.hasMoreElements()) {
        NetworkInterface networkInterface = interfaces.nextElement();
        for (InterfaceAddress interfaceAddress : networkInterface.getInterfaceAddresses()) {
            InetAddress inetAddress = interfaceAddress.getAddress();
            // instanceof Inet4Address is a robust IPv4 test; the original scanned
            // the textual address for ':' to exclude IPv6, which is equivalent
            // but fragile against formatting.
            if (inetAddress instanceof Inet4Address) {
                if (inetAddress.isLoopbackAddress()) {
                    loopback = interfaceAddress;
                } else {
                    addresses.add(interfaceAddress);
                }
            }
        }
    }
}
// Fall back to loopback (127.0.0.1) only when no other IPv4 address was found.
if (addresses.isEmpty() && loopback != null) {
    addresses.add(loopback);
}
return addresses;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.