Dataset column statistics:
  signature      — string lengths 43 to 39.1k
  implementation — string lengths 0 to 450k
public class Database { public DbDatum [ ] get_device_property ( String name , String [ ] propnames ) throws DevFailed { } }
return databaseDAO . get_device_property ( this , name , propnames ) ;
public class VariantNormalizer { /** * < p > Compares two CharSequences , and returns the index beginning from the behind , * at which the CharSequences begin to differ . < / p > * Based on { @ link StringUtils # indexOfDifference } * < p > For example , * { @ code reverseIndexOfDifference ( " you are a machine " , " i have one machine " ) - > 8 } < / p > * < pre > * reverseIndexOfDifference ( null , null ) = - 1 * reverseIndexOfDifference ( " " , " " ) = - 1 * reverseIndexOfDifference ( " " , " abc " ) = 0 * reverseIndexOfDifference ( " abc " , " " ) = 0 * reverseIndexOfDifference ( " abc " , " abc " ) = - 1 * reverseIndexOfDifference ( " ab " , " xyzab " ) = 2 * reverseIndexOfDifference ( " abcde " , " xyzab " ) = 2 * reverseIndexOfDifference ( " abcde " , " xyz " ) = 0 * < / pre > * @ param cs1 the first CharSequence , may be null * @ param cs2 the second CharSequence , may be null * @ return the index from behind where cs1 and cs2 begin to differ ; - 1 if they are equal */ public static int reverseIndexOfDifference ( final CharSequence cs1 , final CharSequence cs2 ) { } }
if ( cs1 == cs2 ) { return StringUtils . INDEX_NOT_FOUND ; } if ( cs1 == null || cs2 == null ) { return 0 ; } int i ; int cs1Length = cs1 . length ( ) ; int cs2Length = cs2 . length ( ) ; for ( i = 0 ; i < cs1Length && i < cs2Length ; ++ i ) { if ( cs1 . charAt ( cs1Length - i - 1 ) != cs2 . charAt ( cs2Length - i - 1 ) ) { break ; } } if ( i < cs2Length || i < cs1Length ) { return i ; } return StringUtils . INDEX_NOT_FOUND ;
public class WatchDir {

    /**
     * Register the given directory, and all its sub-directories, with the
     * WatchService.
     *
     * @param aStartDir The start directory to be iterated. May not be
     *        <code>null</code>.
     * @throws IOException if walking the tree or registering a directory fails
     */
    private void _registerDirRecursive(@Nonnull final Path aStartDir) throws IOException {
        // Walk the whole tree; directories are registered in preVisitDirectory
        // so each one is watched before its children are visited.
        Files.walkFileTree(aStartDir, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult preVisitDirectory(final Path dir, final BasicFileAttributes attrs) throws IOException {
                _registerDir(dir);
                return FileVisitResult.CONTINUE;
            }
        });
    }
}
public class StringBufferWriter { public void write ( char [ ] ca , int offset , int length ) throws IOException { } }
_buffer . append ( ca , offset , length ) ;
public class TcpConnection { /** * This method is thread safe . */ public int send ( Connection connection , Object object ) throws IOException { } }
SocketChannel socketChannel = this . socketChannel ; if ( socketChannel == null ) throw new SocketException ( "Connection is closed." ) ; synchronized ( writeLock ) { int start = writeBuffer . position ( ) ; int lengthLength = serialization . getLengthLength ( ) ; try { // Leave room for length . writeBuffer . position ( writeBuffer . position ( ) + lengthLength ) ; // Write data . serialization . write ( connection , writeBuffer , object ) ; } catch ( Throwable ex ) { throw new KryoNetException ( "Error serializing object of type: " + object . getClass ( ) . getName ( ) , ex ) ; } int end = writeBuffer . position ( ) ; // Write data length . writeBuffer . position ( start ) ; serialization . writeLength ( writeBuffer , end - lengthLength - start ) ; writeBuffer . position ( end ) ; // Write to socket if no data was queued . if ( start == 0 && ! writeToSocket ( ) ) { // A partial write , set OP _ WRITE to be notified when more writing can occur . selectionKey . interestOps ( SelectionKey . OP_READ | SelectionKey . OP_WRITE ) ; } else { // Full write , wake up selector so idle event will be fired . selectionKey . selector ( ) . wakeup ( ) ; } if ( DEBUG || TRACE ) { float percentage = writeBuffer . position ( ) / ( float ) writeBuffer . capacity ( ) ; if ( DEBUG && percentage > 0.75f ) debug ( "kryonet" , connection + " TCP write buffer is approaching capacity: " + percentage + "%" ) ; else if ( TRACE && percentage > 0.25f ) trace ( "kryonet" , connection + " TCP write buffer utilization: " + percentage + "%" ) ; } lastWriteTime = System . currentTimeMillis ( ) ; return end - start ; }
public class Job { /** * Set property value . * @ param key property key * @ param value property value * @ return the previous value of the specified key in this property list , or { @ code null } if it did not have one */ public Object setProperty ( final String key , final String value ) { } }
return prop . put ( key , value ) ;
public class PrefixedProperties { /** * Gets the property . * @ param value * the value * @ param def * the def * @ return the property */ @ Override public String getProperty ( final String value , final String def ) { } }
final String result = getProperty ( value ) ; return result == null ? def : result ;
public class ClosureBundler { /** * Append the contents of the file to the supplied appendable . */ public void appendTo ( Appendable out , DependencyInfo info , File content , Charset contentCharset ) throws IOException { } }
appendTo ( out , info , Files . asCharSource ( content , contentCharset ) ) ;
public class SingleThreadBlockingQpsBenchmark { /** * Useful for triggering a subset of the benchmark in a profiler . */ public static void main ( String [ ] argv ) throws Exception { } }
SingleThreadBlockingQpsBenchmark bench = new SingleThreadBlockingQpsBenchmark ( ) ; bench . setup ( ) ; for ( int i = 0 ; i < 10000 ; i ++ ) { bench . blockingUnary ( ) ; } Thread . sleep ( 30000 ) ; bench . teardown ( ) ; System . exit ( 0 ) ;
public class Waiter {

    /**
     * Clears the Android log by invoking {@code logcat -c}.
     * Best-effort: a failure to spawn the process is printed and ignored.
     */
    public void clearLog() {
        try {
            // FIX: the original stored the Process in a local that was never
            // read (dead store); the handle is intentionally discarded.
            // NOTE(review): we do not waitFor() the process, so the log may
            // not yet be cleared when this method returns — confirm callers
            // don't depend on that.
            Runtime.getRuntime().exec("logcat -c");
        } catch (IOException e) {
            // Best-effort cleanup: report and continue.
            e.printStackTrace();
        }
    }
}
public class MapAttributeDefinition { /** * Iterates through the items in the { @ code parameter } map , calling { @ link # convertParameterElementExpressions ( ModelNode ) } * for each value . * < strong > Note < / strong > that the default implementation of { @ link # convertParameterElementExpressions ( ModelNode ) } * will only convert simple { @ link ModelType # STRING } values . If users need to handle complex values * with embedded expressions , they should use a subclass that overrides that method . * { @ inheritDoc } */ @ Override protected ModelNode convertParameterExpressions ( ModelNode parameter ) { } }
ModelNode result = parameter ; List < Property > asList ; try { asList = parameter . asPropertyList ( ) ; } catch ( IllegalArgumentException iae ) { // We can ' t convert ; we ' ll just return parameter asList = null ; } if ( asList != null ) { boolean changeMade = false ; ModelNode newMap = new ModelNode ( ) . setEmptyObject ( ) ; for ( Property prop : parameter . asPropertyList ( ) ) { ModelNode converted = convertParameterElementExpressions ( prop . getValue ( ) ) ; newMap . get ( prop . getName ( ) ) . set ( converted ) ; changeMade |= ! converted . equals ( prop . getValue ( ) ) ; } if ( changeMade ) { result = newMap ; } } return result ;
public class StorageGatewayUtils { /** * Sends a request to the AWS Storage Gateway server running at the * specified address , and returns the activation key for that server . * @ param gatewayAddress * The DNS name or IP address of a running AWS Storage Gateway * @ param activationRegionName * The region in which the gateway will be activated . * @ return The activation key required for some API calls to AWS Storage * Gateway . * @ throws AmazonClientException * If any problems are encountered while trying to contact the * remote AWS Storage Gateway server . */ public static String getActivationKey ( String gatewayAddress , Region activationRegion ) throws AmazonClientException { } }
return getActivationKey ( gatewayAddress , activationRegion == null ? null : activationRegion . getName ( ) ) ;
public class CmsCommentImages { /** * Returns the HTML for the dialog input form to comment the images . < p > * @ return the HTML for the dialog input form to comment the images */ public String buildDialogForm ( ) { } }
StringBuffer result = new StringBuffer ( 16384 ) ; Iterator < CmsResource > i = getImages ( ) . iterator ( ) ; result . append ( "<div style=\"height: 450px; padding: 4px; overflow: auto;\">" ) ; while ( i . hasNext ( ) ) { CmsResource res = i . next ( ) ; String imageName = res . getName ( ) ; String propertySuffix = "" + imageName . hashCode ( ) ; result . append ( dialogBlockStart ( imageName ) ) ; result . append ( "<table border=\"0\">\n" ) ; result . append ( "<tr>\n\t<td style=\"vertical-align: top;\">" ) ; // create image tag result . append ( "<img src=\"" ) ; StringBuffer link = new StringBuffer ( 256 ) ; link . append ( getCms ( ) . getSitePath ( res ) ) ; link . append ( getImageScaler ( ) . toRequestParam ( ) ) ; result . append ( getJsp ( ) . link ( link . toString ( ) ) ) ; result . append ( "\" border=\"0\" alt=\"\" width=\"" ) ; result . append ( getImageScaler ( ) . getWidth ( ) ) ; result . append ( "\" height=\"" ) ; result . append ( getImageScaler ( ) . getHeight ( ) ) ; result . append ( "\">" ) ; result . append ( "</td>\n" ) ; result . append ( "\t<td class=\"maxwidth\" style=\"vertical-align: top;\">\n" ) ; result . append ( "\t\t<table border=\"0\">\n" ) ; // build title property input row String title = "" ; try { title = getCms ( ) . readPropertyObject ( res , CmsPropertyDefinition . PROPERTY_TITLE , false ) . getValue ( ) ; } catch ( CmsException e ) { // log , should never happen if ( LOG . isErrorEnabled ( ) ) { LOG . error ( e . getLocalizedMessage ( getLocale ( ) ) ) ; } } result . append ( "\t\t<tr>\n\t\t\t<td style=\"white-space: nowrap;\" unselectable=\"on\">" ) ; result . append ( key ( Messages . GUI_LABEL_TITLE_0 ) ) ; result . append ( ":</td>\n\t\t\t<td class=\"maxwidth\">" ) ; result . append ( "<input type=\"text\" class=\"maxwidth\" name=\"" ) ; result . append ( PREFIX_TITLE ) ; result . append ( propertySuffix ) ; result . append ( "\" value=\"" ) ; if ( CmsStringUtil . isNotEmpty ( title ) ) { result . 
append ( CmsEncoder . escapeXml ( title ) ) ; } result . append ( "\">" ) ; result . append ( "</td>\n\t\t</tr>\n" ) ; // build description property input row String description = "" ; try { description = getCms ( ) . readPropertyObject ( res , CmsPropertyDefinition . PROPERTY_DESCRIPTION , false ) . getValue ( ) ; } catch ( CmsException e ) { // log , should never happen if ( LOG . isErrorEnabled ( ) ) { LOG . error ( e . getLocalizedMessage ( getLocale ( ) ) ) ; } } result . append ( "\t\t<tr>\n\t\t\t<td style=\"white-space: nowrap; vertical-align: top;\" unselectable=\"on\">" ) ; result . append ( key ( Messages . GUI_LABEL_DESCRIPTION_0 ) ) ; result . append ( ":</td>\n\t\t\t<td style=\"vertical-align: top; height: 110px;\">" ) ; result . append ( "<textarea rows=\"8\" class=\"maxwidth\" style=\"overflow: auto;\" name=\"" ) ; result . append ( PREFIX_DESCRIPTION ) ; result . append ( propertySuffix ) ; result . append ( "\">" ) ; if ( CmsStringUtil . isNotEmpty ( description ) ) { result . append ( CmsEncoder . escapeXml ( description ) ) ; } result . append ( "</textarea>" ) ; result . append ( "</td>\n\t\t</tr>\n" ) ; result . append ( "\t\t</table>\n" ) ; result . append ( "</td>\n</tr>\n" ) ; result . append ( "</table>\n" ) ; result . append ( dialogBlockEnd ( ) ) ; if ( i . hasNext ( ) ) { // append spacer if another entry follows result . append ( dialogSpacer ( ) ) ; } } result . append ( "</div>" ) ; return result . toString ( ) ;
public class PolicyAssignmentsInner { /** * Gets all the policy assignments for a subscription . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; PolicyAssignmentInner & gt ; object */ public Observable < Page < PolicyAssignmentInner > > listNextAsync ( final String nextPageLink ) { } }
return listNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < PolicyAssignmentInner > > , Page < PolicyAssignmentInner > > ( ) { @ Override public Page < PolicyAssignmentInner > call ( ServiceResponse < Page < PolicyAssignmentInner > > response ) { return response . body ( ) ; } } ) ;
public class LastModifiedServlet { /** * Gets a last modified time given a context - relative path starting with a * slash ( / ) . * Any file ending in " . css " ( case - insensitive ) will be parsed and will have * a modified time that is equal to the greatest of itself or any referenced * URL . * @ return the modified time or < code > 0 < / code > when unknown . */ public static long getLastModified ( ServletContext servletContext , HttpServletRequest request , String path , String extension ) { } }
HeaderAndPath hap = new HeaderAndPath ( request , path ) ; if ( CSS_EXTENSION . equals ( extension ) ) { try { // Parse CSS file , finding all dependencies . // Don ' t re - parse when CSS file not changed , but still check // dependencies . return ParsedCssFile . parseCssFile ( servletContext , hap ) . newestLastModified ; } catch ( IOException e ) { return 0 ; } } else { return getCachedLastModified ( servletContext , hap ) ; }
public class BrowsersDataProvider { /** * Return available grid browsers applying filter defined by Map content . * Filter - > Regexp as : " filter . key ( ) = filter . value ( key ) [ , | } ] " * @ param filter browser selected for test execution * @ return browsers list */ private static List < String > gridBrowsers ( Map < String , String > filter ) { } }
ArrayList < String > response = new ArrayList < String > ( ) ; LOGGER . debug ( "Trying to get a list of Selenium-available browsers" ) ; String grid = System . getProperty ( "SELENIUM_GRID" ) ; if ( grid != null ) { grid = "http://" + grid + "/grid/console" ; Document doc ; try { doc = Jsoup . connect ( grid ) . timeout ( DEFAULT_TIMEOUT ) . get ( ) ; } catch ( IOException e ) { LOGGER . debug ( "Exception on connecting to Selenium grid: {}" , e . getMessage ( ) ) ; return response ; } Elements slaves = ( Elements ) doc . select ( "div.proxy" ) ; for ( Element slave : slaves ) { String slaveStatus = slave . select ( "p.proxyname" ) . first ( ) . text ( ) ; if ( ! slaveStatus . contains ( "Connection" ) && ! slaveStatus . contains ( "Conexión" ) ) { Integer iBusy = 0 ; Elements browserList = slave . select ( "div.content_detail" ) . select ( "*[title]" ) ; Elements busyBrowserList = slave . select ( "div.content_detail" ) . select ( "p > .busy" ) ; for ( Element browserDetails : browserList ) { if ( browserDetails . attr ( "title" ) . startsWith ( "{" ) ) { boolean filterCheck = true ; for ( Map . Entry < String , String > f : filter . entrySet ( ) ) { String stringFilter = f . getKey ( ) + "=" + f . getValue ( ) + "[,|}]" ; Pattern patFilter = Pattern . compile ( stringFilter ) ; Matcher mFilter = patFilter . matcher ( browserDetails . attr ( "title" ) ) ; filterCheck = filterCheck & mFilter . find ( ) ; } if ( filterCheck ) { Pattern pat = Pattern . compile ( "browserName=(.*?),.*?(version=(.*?)[,|}])" ) ; Matcher m = pat . matcher ( browserDetails . attr ( "title" ) ) ; while ( m . find ( ) ) { String browser = m . group ( 1 ) + "_" + m . group ( 3 ) ; if ( browser . equals ( System . getProperty ( "FORCE_BROWSER" ) ) ) { response . add ( browser ) ; } } } } else { String version = busyBrowserList . get ( iBusy ) . parent ( ) . text ( ) ; String browser = busyBrowserList . get ( iBusy ) . text ( ) ; version = version . substring ( 2 ) ; version = version . 
replace ( browser , "" ) ; String browserSrc = busyBrowserList . get ( iBusy ) . select ( "img" ) . attr ( "src" ) ; if ( ! browserSrc . equals ( "" ) ) { browser = browserSrc . substring ( browserSrc . lastIndexOf ( '/' ) + 1 , browserSrc . length ( ) - DEFAULT_LESS_LENGTH ) ; } response . add ( browser + "_" + version ) ; iBusy ++ ; } } } } } // Sort response Collections . sort ( response ) ; return response ;
public class TextPost { /** * Get the details of this post ( and the base details ) * @ return the details */ @ Override protected Map < String , Object > detail ( ) { } }
final Map < String , Object > map = super . detail ( ) ; map . put ( "title" , this . title ) ; map . put ( "body" , this . body ) ; return map ;
public class AutoEncodingFilteredTag { /** * @ Override * public boolean isValidatingMediaInputType ( MediaType inputType ) { * return inputValidator ! = null & & inputValidator . isValidatingMediaInputType ( inputType ) ; */ @ Override public void doTag ( ) throws JspException , IOException { } }
try { final PageContext pageContext = ( PageContext ) getJspContext ( ) ; final ServletRequest request = pageContext . getRequest ( ) ; final HttpServletResponse response = ( HttpServletResponse ) pageContext . getResponse ( ) ; final JspWriter out = pageContext . getOut ( ) ; final ThreadEncodingContext parentEncodingContext = ThreadEncodingContext . getCurrentContext ( request ) ; // Determine the container ' s content type MediaType containerContentType ; if ( parentEncodingContext != null ) { // Use the output type of the parent containerContentType = parentEncodingContext . contentType ; } else { // Use the content type of the response String responseContentType = response . getContentType ( ) ; // Default to XHTML : TODO : Is there a better way since can ' t set content type early in response then reset again . . . if ( responseContentType == null ) responseContentType = MediaType . XHTML . getContentType ( ) ; containerContentType = MediaType . getMediaTypeForContentType ( responseContentType ) ; } // Find the encoder final MediaType myContentType = getContentType ( ) ; MediaEncoder mediaEncoder = MediaEncoder . getInstance ( new HttpServletResponseEncodingContext ( response ) , myContentType , containerContentType ) ; if ( mediaEncoder != null ) { setMediaEncoderOptions ( mediaEncoder ) ; // Encode both our output and the content . The encoder validates our input and guarantees valid output for our parent . MediaWriter mediaWriter = new MediaWriter ( mediaEncoder , out ) ; mediaWriter . writePrefix ( ) ; try { ThreadEncodingContext . setCurrentContext ( request , new ThreadEncodingContext ( myContentType , mediaWriter ) ) ; try { doTag ( mediaWriter ) ; } finally { // Restore previous encoding context that is used for our output ThreadEncodingContext . setCurrentContext ( request , parentEncodingContext ) ; } } finally { mediaWriter . 
writeSuffix ( ) ; } } else { // If parentValidMediaInput exists and is validating our output type , no additional validation is required if ( parentEncodingContext != null && parentEncodingContext . validMediaInput . isValidatingMediaInputType ( myContentType ) ) { ThreadEncodingContext . setCurrentContext ( request , new ThreadEncodingContext ( myContentType , parentEncodingContext . validMediaInput ) ) ; try { doTag ( out ) ; } finally { ThreadEncodingContext . setCurrentContext ( request , parentEncodingContext ) ; } } else { // Not using an encoder and parent doesn ' t validate our output , validate our own output . MediaValidator validator = MediaValidator . getMediaValidator ( myContentType , out ) ; ThreadEncodingContext . setCurrentContext ( request , new ThreadEncodingContext ( myContentType , validator ) ) ; try { doTag ( validator ) ; } finally { ThreadEncodingContext . setCurrentContext ( request , parentEncodingContext ) ; } } } } catch ( MediaException err ) { throw new JspTagException ( err ) ; }
public class AWSTransferClient { /** * Describes the server that you specify by passing the < code > ServerId < / code > parameter . * The response contains a description of the server ' s properties . * @ param describeServerRequest * @ return Result of the DescribeServer operation returned by the service . * @ throws ServiceUnavailableException * The request has failed because the AWS Transfer for SFTP service is not available . * @ throws InternalServiceErrorException * This exception is thrown when an error occurs in the AWS Transfer for SFTP service . * @ throws InvalidRequestException * This exception is thrown when the client submits a malformed request . * @ throws ResourceNotFoundException * This exception is thrown when a resource is not found by the AWS Transfer for SFTP service . * @ sample AWSTransfer . DescribeServer * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / transfer - 2018-11-05 / DescribeServer " target = " _ top " > AWS API * Documentation < / a > */ @ Override public DescribeServerResult describeServer ( DescribeServerRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeServer ( request ) ;
public class ChildAxisQuery { /** * { @ inheritDoc } */ public QueryHits execute ( JcrIndexSearcher searcher , SessionImpl session , Sort sort ) throws IOException { } }
if ( sort . getSort ( ) . length == 0 && matchesAnyChildNode ( ) ) { Query context = getContextQuery ( ) ; return new ChildNodesQueryHits ( searcher . evaluate ( context ) , session , indexConfig ) ; } else { return null ; }
public class DiscoveryNodeListProvider { /** * ( non - Javadoc ) * @ see com . netflix . evcache . pool . EVCacheNodeList # discoverInstances ( ) */ @ Override public Map < ServerGroup , EVCacheServerGroupConfig > discoverInstances ( String appName ) throws IOException { } }
if ( ( applicationInfoManager . getInfo ( ) . getStatus ( ) == InstanceStatus . DOWN ) ) { return Collections . < ServerGroup , EVCacheServerGroupConfig > emptyMap ( ) ; } /* Get a list of EVCACHE instances from the DiscoveryManager */ final Application app = _discoveryClient . getApplication ( _appName ) ; if ( app == null ) return Collections . < ServerGroup , EVCacheServerGroupConfig > emptyMap ( ) ; final List < InstanceInfo > appInstances = app . getInstances ( ) ; final Map < ServerGroup , EVCacheServerGroupConfig > instancesSpecific = new HashMap < ServerGroup , EVCacheServerGroupConfig > ( ) ; /* Iterate all the discovered instances to find usable ones */ for ( InstanceInfo iInfo : appInstances ) { final DataCenterInfo dcInfo = iInfo . getDataCenterInfo ( ) ; if ( dcInfo == null ) { if ( log . isErrorEnabled ( ) ) log . error ( "Data Center Info is null for appName - " + _appName ) ; continue ; } /* Only AWS instances are usable ; bypass all others */ if ( DataCenterInfo . Name . Amazon != dcInfo . getName ( ) || ! ( dcInfo instanceof AmazonInfo ) ) { log . error ( "This is not an AWSDataCenter. You will not be able to use Discovery Nodelist Provider. Cannot proceed. " + "DataCenterInfo : {}; appName - {}. Please use SimpleNodeList provider and specify the server groups manually." , dcInfo , _appName ) ; continue ; } final AmazonInfo amznInfo = ( AmazonInfo ) dcInfo ; // We checked above if this instance is Amazon so no need to do a instanceof check final String zone = amznInfo . get ( AmazonInfo . MetaDataKey . availabilityZone ) ; if ( zone == null ) { EVCacheMetricsFactory . increment ( _appName , null , "EVCacheClient-DiscoveryNodeListProvider-NULL_ZONE" ) ; continue ; } final String asgName = iInfo . getASGName ( ) ; if ( asgName == null ) { EVCacheMetricsFactory . increment ( _appName , null , "EVCacheClient-DiscoveryNodeListProvider-NULL_SERVER_GROUP" ) ; continue ; } final DynamicBooleanProperty asgEnabled = EVCacheConfig . getInstance ( ) . 
getDynamicBooleanProperty ( asgName + ".enabled" , true ) ; if ( ! asgEnabled . get ( ) ) { if ( log . isDebugEnabled ( ) ) log . debug ( "ASG " + asgName + " is disabled so ignoring it" ) ; continue ; } final Map < String , String > metaInfo = iInfo . getMetadata ( ) ; final int evcachePort = Integer . parseInt ( ( metaInfo != null && metaInfo . containsKey ( "evcache.port" ) ) ? metaInfo . get ( "evcache.port" ) : DEFAULT_PORT ) ; final int rendPort = ( metaInfo != null && metaInfo . containsKey ( "rend.port" ) ) ? Integer . parseInt ( metaInfo . get ( "rend.port" ) ) : 0 ; final int rendBatchPort = ( metaInfo != null && metaInfo . containsKey ( "rend.batch.port" ) ) ? Integer . parseInt ( metaInfo . get ( "rend.batch.port" ) ) : 0 ; final int udsproxyMemcachedPort = ( metaInfo != null && metaInfo . containsKey ( "udsproxy.memcached.port" ) ) ? Integer . parseInt ( metaInfo . get ( "udsproxy.memcached.port" ) ) : 0 ; final int udsproxyMementoPort = ( metaInfo != null && metaInfo . containsKey ( "udsproxy.memento.port" ) ) ? Integer . parseInt ( metaInfo . get ( "udsproxy.memento.port" ) ) : 0 ; ChainedDynamicProperty . BooleanProperty useBatchPort = useRendBatchPortMap . get ( asgName ) ; if ( useBatchPort == null ) { useBatchPort = EVCacheConfig . getInstance ( ) . getChainedBooleanProperty ( _appName + ".use.batch.port" , "evcache.use.batch.port" , Boolean . FALSE , null ) ; useRendBatchPortMap . put ( asgName , useBatchPort ) ; } int port = rendPort == 0 ? evcachePort : ( ( useBatchPort . get ( ) . booleanValue ( ) ) ? rendBatchPort : rendPort ) ; final ChainedDynamicProperty . BooleanProperty isSecure = EVCacheConfig . getInstance ( ) . getChainedBooleanProperty ( asgName + ".use.secure" , _appName + ".use.secure" , false , null ) ; if ( isSecure . get ( ) ) { port = Integer . parseInt ( ( metaInfo != null && metaInfo . containsKey ( "evcache.secure.port" ) ) ? metaInfo . 
get ( "evcache.secure.port" ) : DEFAULT_SECURE_PORT ) ; } final ServerGroup serverGroup = new ServerGroup ( zone , asgName ) ; final Set < InetSocketAddress > instances ; final EVCacheServerGroupConfig config ; if ( instancesSpecific . containsKey ( serverGroup ) ) { config = instancesSpecific . get ( serverGroup ) ; instances = config . getInetSocketAddress ( ) ; } else { instances = new HashSet < InetSocketAddress > ( ) ; config = new EVCacheServerGroupConfig ( serverGroup , instances , rendPort , udsproxyMemcachedPort , udsproxyMementoPort ) ; instancesSpecific . put ( serverGroup , config ) ; EVCacheMetricsFactory . getLongGauge ( _appName + "-port" , BasicTagList . of ( "ServerGroup" , asgName , "APP" , _appName ) ) . set ( Long . valueOf ( port ) ) ; } /* Don ' t try to use downed instances */ final InstanceStatus status = iInfo . getStatus ( ) ; if ( status == null || InstanceStatus . OUT_OF_SERVICE == status || InstanceStatus . DOWN == status ) { if ( log . isDebugEnabled ( ) ) log . debug ( "The Status of the instance in Discovery is " + status + ". App Name : " + _appName + "; Zone : " + zone + "; Host : " + iInfo . getHostName ( ) + "; Instance Id - " + iInfo . getId ( ) ) ; continue ; } final InstanceInfo myInfo = applicationInfoManager . getInfo ( ) ; final DataCenterInfo myDC = myInfo . getDataCenterInfo ( ) ; final AmazonInfo myAmznDC = ( myDC instanceof AmazonInfo ) ? ( AmazonInfo ) myDC : null ; final String myInstanceId = myInfo . getInstanceId ( ) ; final String myIp = myInfo . getIPAddr ( ) ; final String myPublicHostName = ( myAmznDC != null ) ? myAmznDC . get ( AmazonInfo . MetaDataKey . publicHostname ) : null ; boolean isInCloud = false ; if ( myPublicHostName != null ) { isInCloud = myPublicHostName . startsWith ( "ec2" ) ; } if ( ! isInCloud ) { if ( myAmznDC != null && myAmznDC . get ( AmazonInfo . MetaDataKey . vpcId ) != null ) { isInCloud = true ; } else { if ( myIp . 
equals ( myInstanceId ) ) { isInCloud = false ; } } } final String myZone = ( myAmznDC != null ) ? myAmznDC . get ( AmazonInfo . MetaDataKey . availabilityZone ) : null ; final String myRegion = ( myZone != null ) ? myZone . substring ( 0 , myZone . length ( ) - 1 ) : null ; final String region = ( zone != null ) ? zone . substring ( 0 , zone . length ( ) - 1 ) : null ; final String host = amznInfo . get ( AmazonInfo . MetaDataKey . publicHostname ) ; InetSocketAddress address = null ; final String vpcId = amznInfo . get ( AmazonInfo . MetaDataKey . vpcId ) ; final String localIp = amznInfo . get ( AmazonInfo . MetaDataKey . localIpv4 ) ; if ( log . isDebugEnabled ( ) ) log . debug ( "myZone - " + myZone + "; zone : " + zone + "; myRegion : " + myRegion + "; region : " + region + "; host : " + host + "; vpcId : " + vpcId ) ; if ( localIp != null && ignoreHosts . get ( ) . contains ( localIp ) ) continue ; if ( host != null && ignoreHosts . get ( ) . contains ( host ) ) continue ; if ( vpcId != null ) { final InetAddress add = InetAddresses . forString ( localIp ) ; final InetAddress inetAddress = InetAddress . getByAddress ( localIp , add . getAddress ( ) ) ; address = new InetSocketAddress ( inetAddress , port ) ; if ( log . isDebugEnabled ( ) ) log . debug ( "VPC : localIp - " + localIp + " ; add : " + add + "; inetAddress : " + inetAddress + "; address - " + address + "; App Name : " + _appName + "; Zone : " + zone + "; myZone - " + myZone + "; Host : " + iInfo . getHostName ( ) + "; Instance Id - " + iInfo . getId ( ) ) ; } else { if ( host != null && host . startsWith ( "ec2" ) ) { final InetAddress inetAddress = ( localIp != null ) ? InetAddress . getByAddress ( host , InetAddresses . forString ( localIp ) . getAddress ( ) ) : InetAddress . getByName ( host ) ; address = new InetSocketAddress ( inetAddress , port ) ; if ( log . isDebugEnabled ( ) ) log . debug ( "myZone - " + myZone + ". 
host : " + host + "; inetAddress : " + inetAddress + "; address - " + address + "; App Name : " + _appName + "; Zone : " + zone + "; Host : " + iInfo . getHostName ( ) + "; Instance Id - " + iInfo . getId ( ) ) ; } else { final String ipToUse = ( isInCloud ) ? localIp : amznInfo . get ( AmazonInfo . MetaDataKey . publicIpv4 ) ; final InetAddress add = InetAddresses . forString ( ipToUse ) ; final InetAddress inetAddress = InetAddress . getByAddress ( ipToUse , add . getAddress ( ) ) ; address = new InetSocketAddress ( inetAddress , port ) ; if ( log . isDebugEnabled ( ) ) log . debug ( "CLASSIC : IPToUse - " + ipToUse + " ; add : " + add + "; inetAddress : " + inetAddress + "; address - " + address + "; App Name : " + _appName + "; Zone : " + zone + "; myZone - " + myZone + "; Host : " + iInfo . getHostName ( ) + "; Instance Id - " + iInfo . getId ( ) ) ; } } instances . add ( address ) ; } return instancesSpecific ;
public class ConfigUtil { /** * Parse a complex array structure and return it as a map containing maps as * values for further internal structures < br > * Example of configuration we support : * smartcardAdaptor . cardFeatures . 1 . cardFamily = VGE * smartcardAdaptor . cardFeatures . 1 . supportSbm = true * smartcardAdaptor . cardFeatures . 1 . maxSbmBlocks = 16 * smartcardAdaptor . cardFeatures . 1 . sbmMSB . 0 = 0x00 * smartcardAdaptor . cardFeatures . 1 . sbmMSB . 1 = 0x01 * smartcardAdaptor . cardFeatures . 1 . maxRegions = 4 * smartcardAdaptor . cardFeatures . 1 . initialCounter = 1000 * smartcardAdaptor . cardFeatures . 1 . maxNumOfOPPVs = 25 * smartcardAdaptor . cardFeatures . 23 . cardFamily = VGE * smartcardAdaptor . cardFeatures . 23 . supportSbm = true * smartcardAdaptor . cardFeatures . 23 . maxSbmBlocks = 16 * smartcardAdaptor . cardFeatures . 23 . sbmMSB . 0 = 0x00 * smartcardAdaptor . cardFeatures . 23 . sbmMSB . 1 = 0x01 * smartcardAdaptor . cardFeatures . 23 . maxRegions = 4 * smartcardAdaptor . cardFeatures . 23 . initialCounter = 1000 * smartcardAdaptor . cardFeatures . 23 . maxNumOfOPPVs = 25 * < br > * Will return a map where key is 1 , 23 and values are the key / value pairs * the follow the key . < br > * E . g Map - key = 1 , value = Map ( cardFamily - > VGE , supportSbm - > true ) etc . */ public static Map < String , Map < String , String > > parseComplexArrayStructure ( String configPrefix ) { } }
Configuration configuration = ConfigurationFactory . getConfiguration ( ) ; Map < String , Map < String , String > > result = new HashMap < String , Map < String , String > > ( ) ; // get a subset of the configuration based on the config prefix . final Configuration subset = configuration . subset ( configPrefix ) ; @ SuppressWarnings ( "unchecked" ) final Iterator < String > keys = subset . getKeys ( ) ; while ( keys . hasNext ( ) ) { final String key = keys . next ( ) ; final String mapKey = stripKey ( key ) ; // only if the map key is not null and contains the above saved // criteria key do we want to handle this . if ( mapKey != null ) { // first time only - create the inner map instance if ( ! result . containsKey ( mapKey ) ) { result . put ( mapKey , new HashMap < String , String > ( ) ) ; } // get the inner map key value . final String innerMapKey = getInnerMapKey ( key ) ; // update the reply map format . result . get ( mapKey ) . put ( innerMapKey , subset . getString ( key ) ) ; } } return result ;
public class CollectionUtils { /** * Convert given collection to an array . * @ param collection * source collection . * @ return an array has full given collection element and order by * collection index . */ public static < T > T [ ] toArray ( final Collection < T > collection ) { } }
T next = getFirstNotNullValue ( collection ) ; if ( next != null ) { Object [ ] objects = collection . toArray ( ) ; @ SuppressWarnings ( "unchecked" ) T [ ] convertedObjects = ( T [ ] ) Array . newInstance ( next . getClass ( ) , objects . length ) ; System . arraycopy ( objects , 0 , convertedObjects , 0 , objects . length ) ; return convertedObjects ; } else { return null ; }
public class ServletHolder {

    /**
     * Starts this holder: validates the servlet class, prepares its config,
     * and (when configured) instantiates and initializes the servlet eagerly.
     *
     * @throws Exception if the class is not a servlet or init fails
     */
    public void start() throws Exception {
        // Clear any previous unavailability state before (re)starting.
        _unavailable = 0;
        super.start();
        // The configured class must implement javax.servlet.Servlet; otherwise
        // roll the holder back to stopped and fail fast.
        if (!javax.servlet.Servlet.class.isAssignableFrom(_class)) {
            Exception ex = new IllegalStateException("Servlet " + _class + " is not a javax.servlet.Servlet");
            super.stop();
            throw ex;
        }
        _config = new Config();
        // A run-as identity requires the context realm for role handling.
        if (_runAs != null)
            _realm = _httpHandler.getHttpContext().getRealm();
        // SingleThreadModel servlets get a pool (stack) of instances instead
        // of a single shared one.
        if (javax.servlet.SingleThreadModel.class.isAssignableFrom(_class))
            _servlets = new Stack();
        if (_initOnStartup) {
            _servlet = (Servlet) newInstance();
            try {
                initServlet(_servlet, _config);
            } catch (Throwable e) {
                // Init failed: discard the partially constructed state, then
                // rethrow preserving the original throwable type where possible.
                _servlet = null;
                _config = null;
                if (e instanceof Exception)
                    throw (Exception) e;
                else if (e instanceof Error)
                    throw (Error) e;
                else
                    throw new ServletException(e);
            }
        }
    }
}
public class KTypeHashSet {

    /**
     * Returns the exact key instance stored at the given slot. Useful for sets
     * of objects with a custom key-equality relationship, where the stored
     * instance may differ from the lookup instance.
     *
     * @see #indexOf
     * @param index the slot of an existing key (as returned by {@code indexOf})
     * @return the equivalent key currently stored in the set
     * @throws AssertionError if assertions are enabled and the index does not
     *         correspond to an existing key
     */
    public KType indexGet(int index) {
        assert index >= 0 : "The index must point at an existing key.";
        // Valid slots are 0..mask; slot mask+1 is the dedicated slot for the
        // empty key, legal only when the empty key is actually present.
        assert index <= mask || (index == mask + 1 && hasEmptyKey);
        return Intrinsics.<KType> cast(keys[index]);
    }
}
import java.lang.Math;

class CanBeSumOfSquares {

    /**
     * Checks whether {@code num} can be written as x*x + y*y with integers
     * x, y >= 1.
     *
     * NOTE(review): the original documentation says "two different integers",
     * but the original implementation (behavior preserved here) also accepts
     * x == y, e.g. 8 == 2*2 + 2*2 returns true. Confirm which contract callers
     * rely on before tightening.
     *
     * Examples:
     *   canBeSumOfSquares(25) -> true  (3,4)
     *   canBeSumOfSquares(24) -> false
     *   canBeSumOfSquares(17) -> true  (1,4)
     *
     * @param num the integer to test
     * @return true if num is expressible as a sum of two positive squares
     */
    public static boolean canBeSumOfSquares(int num) {
        // For each candidate x, test whether the remainder num - x*x is itself
        // a positive perfect square. This replaces the original O(n) inner
        // scan with an O(sqrt(n)) check; results are identical.
        for (int x = 1; x * x <= num; x++) {
            int remainder = num - x * x;
            if (remainder < 1) {
                continue; // y must be at least 1, as in the original loop
            }
            int y = (int) Math.sqrt(remainder);
            // Guard against floating-point rounding of Math.sqrt.
            while (y * y > remainder) {
                y--;
            }
            if (y >= 1 && y * y == remainder) {
                return true;
            }
        }
        return false;
    }
}
public class AWSElasticBeanstalkClient { /** * Deletes the specified version from the specified application . * < note > * You cannot delete an application version that is associated with a running environment . * < / note > * @ param deleteApplicationVersionRequest * Request to delete an application version . * @ return Result of the DeleteApplicationVersion operation returned by the service . * @ throws SourceBundleDeletionException * Unable to delete the Amazon S3 source bundle associated with the application version . The application * version was deleted successfully . * @ throws InsufficientPrivilegesException * The specified account does not have sufficient privileges for one or more AWS services . * @ throws OperationInProgressException * Unable to perform the specified operation because another operation that effects an element in this * activity is already in progress . * @ throws S3LocationNotInServiceRegionException * The specified S3 bucket does not belong to the S3 region in which the service is running . The following * regions are supported : < / p > * < ul > * < li > * IAD / us - east - 1 * < / li > * < li > * PDX / us - west - 2 * < / li > * < li > * DUB / eu - west - 1 * < / li > * @ sample AWSElasticBeanstalk . DeleteApplicationVersion * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / elasticbeanstalk - 2010-12-01 / DeleteApplicationVersion " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DeleteApplicationVersionResult deleteApplicationVersion ( DeleteApplicationVersionRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteApplicationVersion ( request ) ;
public class VirtualNetworkGatewaysInner {

    /**
     * Generates VPN client package for P2S client of the virtual network gateway
     * in the specified resource group.
     *
     * @param resourceGroupName the name of the resource group
     * @param virtualNetworkGatewayName the name of the virtual network gateway
     * @param parameters parameters supplied to the generate VPN client package operation
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<String> generatevpnclientpackageAsync(String resourceGroupName, String virtualNetworkGatewayName, VpnClientParameters parameters, final ServiceCallback<String> serviceCallback) {
        // Bridge the Observable-based implementation to a ServiceFuture, wiring
        // the caller's callback into the response pipeline.
        return ServiceFuture.fromResponse(generatevpnclientpackageWithServiceResponseAsync(resourceGroupName, virtualNetworkGatewayName, parameters), serviceCallback);
    }
}
public class StreamHandler {

    /**
     * Format and publish a <tt>LogRecord</tt>.
     * <p>
     * The record is first checked with {@code isLoggable} (level, filter,
     * output stream presence). It is then formatted; on the first write the
     * formatter's "head" string is emitted before the record itself. All
     * failures are routed to the registered {@code ErrorManager} rather than
     * thrown, per the logging contract.
     *
     * @param record description of the log event; a null record is silently
     *        ignored and not published
     */
    @Override
    public synchronized void publish(LogRecord record) {
        if (!isLoggable(record)) {
            return;
        }
        String msg;
        try {
            // Formatting happens outside the write path so a broken formatter
            // cannot corrupt the stream.
            msg = getFormatter().format(record);
        } catch (Exception ex) {
            // We don't want to throw an exception here, but we
            // report the exception to any registered ErrorManager.
            reportError(null, ex, ErrorManager.FORMAT_FAILURE);
            return;
        }
        try {
            if (!doneHeader) {
                // Emit the formatter head exactly once per stream.
                writer.write(getFormatter().getHead(this));
                doneHeader = true;
            }
            writer.write(msg);
        } catch (Exception ex) {
            // We don't want to throw an exception here, but we
            // report the exception to any registered ErrorManager.
            reportError(null, ex, ErrorManager.WRITE_FAILURE);
        }
    }
}
public class Balanced { /** * Splits a string around the specified character , returning the parts in an array . * However , any occurrence of the specified character enclosed between balanced parentheses / brackets / braces is ignored . * @ param symbols an optional functor to provide the complete set of balancing symbols * @ param text a String * @ param begin a begin offset * @ param end an end offset * @ param delimiter the character to split the string around * @ param extra an optional functor to provide balancing symbols in addition to the standard ones * @ return a String array containing the split parts */ public static String [ ] split ( Functor < Integer , Integer > symbols , String text , int begin , int end , char delimiter , Functor < Integer , Integer > extra ) { } }
List < String > list = new ArrayList < > ( ) ; return split ( list , symbols , text , begin , end , delimiter , extra ) . toArray ( new String [ list . size ( ) ] ) ;
public class GetIdRequestMarshaller {

    /**
     * Marshalls the given {@code GetIdRequest} fields into the protocol
     * marshaller, one binding per field.
     *
     * @param getIdRequest the request to marshall; must not be null
     * @param protocolMarshaller target marshaller
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(GetIdRequest getIdRequest, ProtocolMarshaller protocolMarshaller) {
        if (getIdRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(getIdRequest.getAccountId(), ACCOUNTID_BINDING);
            protocolMarshaller.marshall(getIdRequest.getIdentityPoolId(), IDENTITYPOOLID_BINDING);
            protocolMarshaller.marshall(getIdRequest.getLogins(), LOGINS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception,
            // preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class EventStreamClient { /** * Read asynchronously events from connected event stream . This method start read thread and returns immediately . Given * callback argument is used to pass events ; when a new event arrives from server { @ link Callback # handle ( Object ) } is * invoked . * Please note that this method should be used on a connected event stream , i . e . after { @ link # open ( URL ) } or auto - open * constructor . * @ param callback callback invoked on new event or on error . * @ throws BugError if attempt to use this method on a not connected event stream . * @ throws InterruptedException if waiting for thread start is interrupted . */ public void read ( Callback < Event > callback ) throws BugError , InterruptedException { } }
if ( inputStream == null ) { throw new BugError ( "Attempt to read from a not connected event stream." ) ; } this . callback = callback ; thread = new Thread ( this , getClass ( ) . getSimpleName ( ) ) ; synchronized ( this ) { thread . start ( ) ; thread . wait ( THREAD_START_TIMEOUT ) ; }
public class Result { /** * Discards the given cookie . The cookie max - age is set to 0 , so is going to be invalidated . * @ param name the name of the cookie * @ return the current result */ public Result discard ( String name ) { } }
Cookie cookie = getCookie ( name ) ; if ( cookie != null ) { cookies . remove ( cookie ) ; cookies . add ( Cookie . builder ( cookie ) . setMaxAge ( 0 ) . build ( ) ) ; } else { cookies . add ( Cookie . builder ( name , "" ) . setMaxAge ( 0 ) . build ( ) ) ; } return this ;
public class CmsJspContentAccessBean { /** * Returns the map of RDFA maps by locale . < p > * @ return the map of RDFA maps by locale */ public Map < String , Map < String , String > > getLocaleRdfa ( ) { } }
if ( m_localeRdfa == null ) { m_localeRdfa = CmsCollectionsGenericWrapper . createLazyMap ( new CmsLocaleRdfaTransformer ( ) ) ; } return m_localeRdfa ;
public class ApiOvhMe { /** * Alter this object properties * REST : PUT / me / accessRestriction / ipDefaultRule * @ param body [ required ] New object properties */ public void accessRestriction_ipDefaultRule_PUT ( OvhIpRestrictionDefaultRule body ) throws IOException { } }
String qPath = "/me/accessRestriction/ipDefaultRule" ; StringBuilder sb = path ( qPath ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ;
public class Calculator {

    /**
     * Returns the index of the first element of {@code arr} equal to
     * {@code str}, ignoring case.
     *
     * @param arr the array to search; may be null or empty
     * @param str the string to look for; may be null
     * @return the index of the first case-insensitive match, or -1 when the
     *         array is null/empty, the string is null, or no match exists
     */
    public static int getIndex(String[] arr, String str) {
        if (arr == null || arr.length == 0 || str == null) {
            return -1;
        }
        for (int i = 0; i < arr.length; i++) {
            // equalsIgnoreCase is the idiomatic equality test and, unlike the
            // original compareToIgnoreCase(...) == 0, is null-safe: a null
            // array element simply does not match instead of throwing NPE.
            if (str.equalsIgnoreCase(arr[i])) {
                return i;
            }
        }
        return -1;
    }
}
public class CommercePriceEntryLocalServiceWrapper { /** * Returns all the commerce price entries matching the UUID and company . * @ param uuid the UUID of the commerce price entries * @ param companyId the primary key of the company * @ return the matching commerce price entries , or an empty list if no matches were found */ @ Override public java . util . List < com . liferay . commerce . price . list . model . CommercePriceEntry > getCommercePriceEntriesByUuidAndCompanyId ( String uuid , long companyId ) { } }
return _commercePriceEntryLocalService . getCommercePriceEntriesByUuidAndCompanyId ( uuid , companyId ) ;
public class route { /** * Use this API to unset the properties of route resource . * Properties that need to be unset are specified in args array . */ public static base_response unset ( nitro_service client , route resource , String [ ] args ) throws Exception { } }
route unsetresource = new route ( ) ; unsetresource . network = resource . network ; unsetresource . netmask = resource . netmask ; unsetresource . gateway = resource . gateway ; unsetresource . td = resource . td ; return unsetresource . unset_resource ( client , args ) ;
public class QueryUtil { /** * Get a rKNN query object for the given distance function . * When possible , this will use an index , but it may default to an expensive * linear scan . * Hints include : * < ul > * < li > Integer : maximum value for k needed < / li > * < li > { @ link de . lmu . ifi . dbs . elki . database . query . DatabaseQuery # HINT _ BULK } bulk * query needed < / li > * < / ul > * @ param relation Relation used * @ param distanceFunction Distance function * @ param hints Optimizer hints * @ param < O > Object type * @ return RKNN Query object */ public static < O > RKNNQuery < O > getRKNNQuery ( Relation < O > relation , DistanceFunction < ? super O > distanceFunction , Object ... hints ) { } }
final DistanceQuery < O > distanceQuery = relation . getDistanceQuery ( distanceFunction , hints ) ; return relation . getRKNNQuery ( distanceQuery , hints ) ;
public class MpJwtPrincipalHandler { /** * If there is a JWTAccount installed in the exchange security context , create * @ param exchange - the request / response exchange * @ throws Exception on failure */ @ Override public void handleRequest ( HttpServerExchange exchange ) throws Exception { } }
Account account = exchange . getSecurityContext ( ) . getAuthenticatedAccount ( ) ; if ( account != null && account . getPrincipal ( ) instanceof JsonWebToken ) { JsonWebToken token = ( JsonWebToken ) account . getPrincipal ( ) ; PrincipalProducer myInstance = CDI . current ( ) . select ( PrincipalProducer . class ) . get ( ) ; myInstance . setJsonWebToken ( token ) ; } next . handleRequest ( exchange ) ;
public class DefaultGroovyMethods {

    /**
     * Create a Collection composed of the intersection of both collections.
     * Any elements that exist in both collections are added to the resultant
     * collection.
     * <pre class="groovyTestCase">assert [4,5] == [1,2,3,4,5].intersect([4,5,6,7,8])</pre>
     *
     * @param left a Collection
     * @param right a Collection
     * @return a Collection as an intersection of both collections
     * @since 1.5.6
     */
    public static <T> Collection<T> intersect(Collection<T> left, Collection<T> right) {
        if (left.isEmpty() || right.isEmpty())
            return createSimilarCollection(left, 0);
        // Make 'left' the larger collection; it is the one loaded into the
        // lookup set below, and 'right' is iterated.
        if (left.size() < right.size()) {
            Collection<T> swaptemp = left;
            left = right;
            right = swaptemp;
        }
        // TODO optimise if same type?
        // boolean nlgnSort = sameType(new Collection[]{left, right});
        // NOTE(review): after the swap, the result collection's type mirrors
        // whichever argument was larger, not necessarily the caller's 'left'.
        Collection<T> result = createSimilarCollection(left, left.size());
        // creates the collection to look for values.
        // The TreeSet with NumberAwareComparator gives Groovy's number-aware
        // equality (e.g. Integer vs Long) rather than plain equals().
        Collection<T> pickFrom = new TreeSet<T>(new NumberAwareComparator<T>());
        pickFrom.addAll(left);
        for (final T t : right) {
            if (pickFrom.contains(t))
                result.add(t);
        }
        return result;
    }
}
public class Getter { /** * Returns a { @ code View } with a certain index , from the list of current { @ code View } s of the specified type . * @ param classToFilterBy which { @ code View } s to choose from * @ param index choose among all instances of this type , e . g . { @ code Button . class } or { @ code EditText . class } * @ return a { @ code View } with a certain index , from the list of current { @ code View } s of the specified type */ public < T extends View > T getView ( Class < T > classToFilterBy , int index ) { } }
return waiter . waitForAndGetView ( index , classToFilterBy ) ;
public class Clipboard {

    /**
     * Places the incoming VoidAggregation into the clipboard for further
     * tracking, keyed by (originatorId, taskId).
     *
     * @param aggregation the arriving aggregation chunk
     * @return TRUE if the given VoidAggregation was the last missing chunk,
     *         FALSE otherwise
     */
    public boolean pin(@NonNull VoidAggregation aggregation) {
        RequestDescriptor descriptor = RequestDescriptor.createDescriptor(aggregation.getOriginatorId(), aggregation.getTaskId());
        VoidAggregation existing = clipboard.get(descriptor);
        if (existing == null) {
            // First chunk for this request: start tracking it.
            existing = aggregation;
            trackingCounter.incrementAndGet();
            clipboard.put(descriptor, aggregation);
        }
        // NOTE(review): on the first pin this accumulates the aggregation into
        // itself; presumably accumulateAggregation tolerates (or relies on)
        // self-accumulation — confirm against VoidAggregation.
        existing.accumulateAggregation(aggregation);
        // if (counter.incrementAndGet() % 10000 == 0)
        // log.info("Clipboard stats: Totals: {}; Completed: {};", clipboard.size(), completedQueue.size());
        int missing = existing.getMissingChunks();
        if (missing == 0) {
            // completedQueue.add(existing);
            completedCounter.incrementAndGet();
            return true;
        } else
            return false;
    }
}
public class FutureResult {

    /**
     * Gets the result of the call.
     *
     * @return the result value when the call succeeded
     * @throws IllegalStateException if the call isn't done yet, failed, or is
     *         in an unknown state
     */
    public T get() throws IllegalStateException {
        switch (this.state) {
            case INCOMPLETE:
                // Do not block browser so just throw ex
                // NOTE(review): "recieved" typo is preserved — this is a
                // runtime message; fixing it is a behavior change.
                throw new IllegalStateException("The server response did not yet recieved.");
            case FAILED:
                // Surface the recorded failure cause to the caller.
                throw new IllegalStateException(this.error);
            case SUCCEEDED:
                return this.value;
            default:
                // Defensive: unreachable unless a new state is added.
                throw new IllegalStateException("Something very unclear");
        }
    }
}
public class PowerMock { /** * Mock all methods of a class except for a specific one . Use this method * only if you have several overloaded methods . * @ param < T > The type of the mock . * @ param type The type that ' ll be used to create a mock instance . * @ param methodNameToExclude The name of the method not to mock . * @ param firstArgumentType The type of the first parameter of the method not to mock * @ param moreTypes Optionally more parameter types that defines the method . Note * that this is only needed to separate overloaded methods . * @ return A mock object of type < T > . */ public static synchronized < T > T createPartialMockForAllMethodsExcept ( Class < T > type , String methodNameToExclude , Class < ? > firstArgumentType , Class < ? > ... moreTypes ) { } }
/* * The reason why we ' ve split the first and " additional types " is * because it should not intervene with the mockAllExcept ( type , * String . . . methodNames ) method . */ final Class < ? > [ ] argumentTypes = mergeArgumentTypes ( firstArgumentType , moreTypes ) ; return createMock ( type , WhiteboxImpl . getAllMethodsExcept ( type , methodNameToExclude , argumentTypes ) ) ;
public class Workteam { /** * The Amazon Marketplace identifier for a vendor ' s work team . * @ param productListingIds * The Amazon Marketplace identifier for a vendor ' s work team . */ public void setProductListingIds ( java . util . Collection < String > productListingIds ) { } }
if ( productListingIds == null ) { this . productListingIds = null ; return ; } this . productListingIds = new java . util . ArrayList < String > ( productListingIds ) ;
public class MediaGroup { /** * Default content index MediaContent . * @ param defaultContentIndex Default content index MediaContent . */ public void setDefaultContentIndex ( final Integer defaultContentIndex ) { } }
for ( int i = 0 ; i < getContents ( ) . length ; i ++ ) { if ( i == defaultContentIndex . intValue ( ) ) { getContents ( ) [ i ] . setDefaultContent ( true ) ; } else { getContents ( ) [ i ] . setDefaultContent ( false ) ; } } this . defaultContentIndex = defaultContentIndex ;
public class WnsService { /** * Pushes a toast to channelUri * @ param channelUri * @ param toast which should be built with { @ link ar . com . fernandospr . wns . model . builders . WnsToastBuilder } * @ return WnsNotificationResponse please see response headers from < a href = " http : / / msdn . microsoft . com / en - us / library / windows / apps / hh465435 . aspx # send _ notification _ response " > http : / / msdn . microsoft . com / en - us / library / windows / apps / hh465435 . aspx # send _ notification _ response < / a > * @ throws WnsException when authentication fails */ public WnsNotificationResponse pushToast ( String channelUri , WnsToast toast ) throws WnsException { } }
return this . pushToast ( channelUri , null , toast ) ;
public class SipApplicationDispatcherImpl {

    /**
     * (non-Javadoc)
     *
     * @see javax.sip.SipListener#processIOException(javax.sip.IOExceptionEvent)
     *
     * Handles stack IO exceptions. Keep-alive timeouts are dispatched to the
     * connector listeners of every deployed application; for client
     * transactions with DNS support, the next hop is attempted per RFC 3263
     * before the failure is finally logged.
     */
    public void processIOException(IOExceptionEvent event) {
        // Case 1: keep-alive timeout reported by the extended event type.
        if (event instanceof IOExceptionEventExt && ((IOExceptionEventExt) event).getReason() == gov.nist.javax.sip.IOExceptionEventExt.Reason.KeepAliveTimeout) {
            IOExceptionEventExt keepAliveTimeout = ((IOExceptionEventExt) event);
            SipConnector connector = findSipConnector(keepAliveTimeout.getLocalHost(), keepAliveTimeout.getLocalPort(), keepAliveTimeout.getTransport());
            if (connector != null) {
                // Notify every deployed application, switching into each
                // application's context (class loader) for the callbacks.
                for (SipContext sipContext : applicationDeployed.values()) {
                    final ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
                    sipContext.enterSipContext();
                    try {
                        for (SipConnectorListener connectorListener : sipContext.getListeners().getSipConnectorListeners()) {
                            try {
                                // One misbehaving listener must not prevent the
                                // others from being notified.
                                connectorListener.onKeepAliveTimeout(connector, keepAliveTimeout.getPeerHost(), keepAliveTimeout.getPeerPort());
                            } catch (Throwable t) {
                                logger.error("SipErrorListener threw exception", t);
                            }
                        }
                    } finally {
                        sipContext.exitSipContext(oldClassLoader);
                    }
                }
                // return;
            }
        }
        // Case 2: IO failure on a client transaction while DNS-based failover
        // is available — try the next hop per RFC 3263 before giving up.
        if (dnsServerLocator != null && event.getSource() instanceof ClientTransaction) {
            ClientTransaction ioExceptionTx = (ClientTransaction) event.getSource();
            if (ioExceptionTx.getApplicationData() != null) {
                SipServletMessageImpl sipServletMessageImpl = ((TransactionApplicationData) ioExceptionTx.getApplicationData()).getSipServletMessage();
                if (sipServletMessageImpl != null && sipServletMessageImpl instanceof SipServletRequestImpl) {
                    if (logger.isDebugEnabled()) {
                        logger.debug("An IOException occured on " + event.getHost() + ":" + event.getPort() + "/" + event.getTransport() + " for source " + event.getSource() + ", trying to visit next hop as per RFC3263");
                    }
                    // If another hop could be attempted, the failure is not
                    // final yet — suppress the error log below.
                    if (((SipServletRequestImpl) sipServletMessageImpl).visitNextHop()) {
                        return;
                    }
                }
            }
        }
        logger.error("An IOException occured on " + event.getHost() + ":" + event.getPort() + "/" + event.getTransport() + " for source " + event.getSource());
    }
}
public class Product { /** * Gets the roadblockingType value for this Product . * @ return roadblockingType * The strategy for serving roadblocked creatives , i . e . instances * where * multiple creatives must be served together on a single * web page . * < span class = " constraint Applicable " > This attribute * is applicable when : < ul > < li > using programmatic guaranteed , using sales * management . < / li > < li > not using programmatic , using sales management . < / li > < / ul > < / span > * < span class = " constraint ReadOnly " > This attribute is read - only when : < ul > < li > using * programmatic guaranteed , using sales management . < / li > < li > not using * programmatic , using sales management . < / li > < / ul > < / span > */ public com . google . api . ads . admanager . axis . v201808 . RoadblockingType getRoadblockingType ( ) { } }
return roadblockingType ;
public class PdfSmartCopy {

    /**
     * Translate a PRIndirectReference to a PdfIndirectReference. In addition,
     * translates the object numbers, and copies the referenced object to the
     * output file if it wasn't available in the cache yet. If it's in the
     * cache, the reference to the already used stream is returned.
     * <p>
     * NB: PRIndirectReferences (and PRIndirectObjects) really need to know
     * what file they came from, because each file has its own namespace. The
     * translation we do from their namespace to ours is *at best* heuristic,
     * and guaranteed to fail under some circumstances.
     *
     * @param in the source indirect reference
     * @return the reference in the output document's namespace
     * @throws IOException on write failure
     * @throws BadPdfFormatException on malformed source objects
     */
    protected PdfIndirectReference copyIndirect(PRIndirectReference in) throws IOException, BadPdfFormatException {
        PdfObject srcObj = PdfReader.getPdfObjectRelease(in);
        ByteStore streamKey = null;
        boolean validStream = false;
        if (srcObj.isStream()) {
            // Streams are deduplicated by content: identical byte stores reuse
            // the previously emitted reference ("smart" copy).
            streamKey = new ByteStore((PRStream) srcObj);
            validStream = true;
            PdfIndirectReference streamRef = (PdfIndirectReference) streamMap.get(streamKey);
            if (streamRef != null) {
                return streamRef;
            }
        }
        PdfIndirectReference theRef;
        RefKey key = new RefKey(in);
        IndirectReferences iRef = (IndirectReferences) indirects.get(key);
        if (iRef != null) {
            // Already mapped; if its body was also copied we are done.
            theRef = iRef.getRef();
            if (iRef.getCopied()) {
                return theRef;
            }
        } else {
            // Reserve a reference in the output before copying the body, so
            // circular references resolve.
            theRef = body.getPdfIndirectReference();
            iRef = new IndirectReferences(theRef);
            indirects.put(key, iRef);
        }
        if (srcObj.isDictionary()) {
            // Page dictionaries are not copied here; only the (reserved)
            // reference is handed back.
            PdfObject type = PdfReader.getPdfObjectRelease(((PdfDictionary) srcObj).get(PdfName.TYPE));
            if (type != null && PdfName.PAGE.equals(type)) {
                return theRef;
            }
        }
        // Mark as copied before recursing into copyObject to stop cycles.
        iRef.setCopied();
        if (validStream) {
            streamMap.put(streamKey, theRef);
        }
        PdfObject obj = copyObject(srcObj);
        addToBody(obj, theRef);
        return theRef;
    }
}
public class EndpointSendConfigurationMarshaller {

    /**
     * Marshalls the given {@code EndpointSendConfiguration} fields into the
     * protocol marshaller, one binding per field.
     *
     * @param endpointSendConfiguration the object to marshall; must not be null
     * @param protocolMarshaller target marshaller
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(EndpointSendConfiguration endpointSendConfiguration, ProtocolMarshaller protocolMarshaller) {
        if (endpointSendConfiguration == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(endpointSendConfiguration.getBodyOverride(), BODYOVERRIDE_BINDING);
            protocolMarshaller.marshall(endpointSendConfiguration.getContext(), CONTEXT_BINDING);
            protocolMarshaller.marshall(endpointSendConfiguration.getRawContent(), RAWCONTENT_BINDING);
            protocolMarshaller.marshall(endpointSendConfiguration.getSubstitutions(), SUBSTITUTIONS_BINDING);
            protocolMarshaller.marshall(endpointSendConfiguration.getTitleOverride(), TITLEOVERRIDE_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception,
            // preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ArrayUtils { /** * < p > Copies the given array and adds the given element at the end of the new array . < / p > * < p > The new array contains the same elements of the input * array plus the given element in the last position . The component type of * the new array is the same as that of the input array . < / p > * < p > If the input array is < code > null < / code > , a new one element array is returned * whose component type is the same as the element , unless the element itself is null , * in which case the return type is Object [ ] < / p > * < pre > * ArrayUtils . add ( null , null ) = [ null ] * ArrayUtils . add ( null , " a " ) = [ " a " ] * ArrayUtils . add ( [ " a " ] , null ) = [ " a " , null ] * ArrayUtils . add ( [ " a " ] , " b " ) = [ " a " , " b " ] * ArrayUtils . add ( [ " a " , " b " ] , " c " ) = [ " a " , " b " , " c " ] * < / pre > * @ param array the array to " add " the element to , may be < code > null < / code > * @ param element the object to add , may be < code > null < / code > * @ return A new array containing the existing elements plus the new element * The returned array type will be that of the input array ( unless null ) , * in which case it will have the same type as the element . * @ since 2.1 */ public static Object [ ] add ( Object [ ] array , Object element ) { } }
Class type ; if ( array != null ) { type = array . getClass ( ) ; } else if ( element != null ) { type = element . getClass ( ) ; } else { type = Object . class ; } Object [ ] newArray = ( Object [ ] ) copyArrayGrow1 ( array , type ) ; newArray [ newArray . length - 1 ] = element ; return newArray ;
public class ModelsImpl { /** * Adds an entity extractor to the application . * @ param appId The application ID . * @ param versionId The version ID . * @ param addEntityOptionalParameter the object representing the optional parameters to be set before calling this API * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < UUID > addEntityAsync ( UUID appId , String versionId , AddEntityOptionalParameter addEntityOptionalParameter , final ServiceCallback < UUID > serviceCallback ) { } }
return ServiceFuture . fromResponse ( addEntityWithServiceResponseAsync ( appId , versionId , addEntityOptionalParameter ) , serviceCallback ) ;
public class TagAPI { /** * Update the tags on the objects * @ param reference * The object the tags should be updated on * @ param tags * The tags that should now be set on the object */ public void updateTags ( Reference reference , String ... tags ) { } }
updateTags ( reference , Arrays . asList ( tags ) ) ;
public class Gauge { /** * The factor defines the width of the medium tick mark . * It can be in the range from 0 - 1. * @ param FACTOR */ public void setMediumTickMarkWidthFactor ( final double FACTOR ) { } }
if ( null == mediumTickMarkWidthFactor ) { _mediumTickMarkWidthFactor = Helper . clamp ( 0.0 , 1.0 , FACTOR ) ; fireUpdateEvent ( REDRAW_EVENT ) ; } else { mediumTickMarkWidthFactor . set ( FACTOR ) ; }
public class CopyManager {

    /**
     * Pass results of a COPY TO STDOUT query from the database into a Writer.
     *
     * @param sql COPY TO STDOUT statement
     * @param to the stream to write the results to (row by row)
     * @return number of rows updated for server 8.2 or newer; -1 for older
     * @throws SQLException on database usage errors
     * @throws IOException upon writer or database connection failure
     */
    public long copyOut(final String sql, Writer to) throws SQLException, IOException {
        byte[] buf;
        CopyOut cp = copyOut(sql);
        try {
            // Stream each chunk from the copy operation, decoding with the
            // connection's encoding before writing.
            while ((buf = cp.readFromCopy()) != null) {
                to.write(encoding.decode(buf));
            }
            return cp.getHandledRowCount();
        } catch (IOException ioEX) {
            // The writer failed: cancel the server-side copy first, then drain
            // any buffered data so the connection does not block on close.
            // if not handled this way the close call will hang, at least in 8.2
            if (cp.isActive()) {
                cp.cancelCopy();
            }
            try {
                // read until exhausted or operation cancelled SQLException
                while ((buf = cp.readFromCopy()) != null) {
                }
            } catch (SQLException sqlEx) {
                // Intentionally swallowed: the cancel above typically surfaces
                // here after several kB; the original IOException is what the
                // caller needs to see.
            } // typically after several kB
            throw ioEX;
        } finally {
            // see to it that we do not leave the connection locked
            if (cp.isActive()) {
                cp.cancelCopy();
            }
        }
    }
}
public class SplitShowAreaHandler { private void cleanup ( ) { } }
for ( Rectangle rectangle : labelBgs ) { mapWidget . render ( rectangle , RenderGroup . SCREEN , RenderStatus . DELETE ) ; } labelBgs . clear ( ) ; for ( Text text : labelTxts ) { mapWidget . render ( text , RenderGroup . SCREEN , RenderStatus . DELETE ) ; } labelTxts . clear ( ) ; centroids . clear ( ) ;
public class CrudMB { /** * called via preRenderView or viewAction */ public void init ( ) { } }
if ( FacesContext . getCurrentInstance ( ) . getPartialViewContext ( ) . isAjaxRequest ( ) ) { return ; } if ( id != null && ! "" . equals ( id ) ) { entity = crudService . findById ( id ) ; if ( entity == null ) { log . info ( String . format ( "Entity not found with id %s, a new one will be initialized." , id ) ) ; id = null ; entity = initEntity ( ) ; } }
public class RoleAssignmentsInner {
    /**
     * Gets all role assignments for the subscription.
     * @param filter The filter to apply on the operation. Use $filter=atScope() to return all role assignments at or above the scope. Use $filter=principalId eq {id} to return all role assignments at, above or below the scope for the specified principal.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;RoleAssignmentInner&gt; object
     */
    public Observable<Page<RoleAssignmentInner>> listAsync(final String filter) {
        // Unwrap the ServiceResponse envelope so subscribers only see the page payload.
        return listWithServiceResponseAsync(filter)
            .map(new Func1<ServiceResponse<Page<RoleAssignmentInner>>, Page<RoleAssignmentInner>>() {
                @Override
                public Page<RoleAssignmentInner> call(ServiceResponse<Page<RoleAssignmentInner>> response) {
                    return response.body();
                }
            });
    }
}
public class Tag {
    /**
     * ----- protected static methods -----
     * Writes the opening of a tag by delegating to the fuller overload.
     * The hard-coded {@code false} fills the delegate's extra boolean parameter —
     * presumably a "close/empty tag" flag; TODO confirm against the delegate's signature.
     * @param writer the output writer
     * @param tagName the tag name to open
     * @param newline whether to emit a trailing newline
     * @param attributes the attributes to render on the tag
     * @param level the nesting level used for indentation
     * @param indent the indent string repeated per level
     * @throws IOException if writing fails
     */
    protected static void beginTag(final PrintWriter writer, final String tagName, final boolean newline, final List<Attr> attributes, final int level, final String indent) throws IOException {
        beginTag(writer, tagName, newline, false, attributes, level, indent);
    }
}
public class CommerceNotificationTemplateUserSegmentRelPersistenceImpl {
    /**
     * Returns the commerce notification template user segment rels before and after the current commerce notification template user segment rel in the ordered set where commerceUserSegmentEntryId = &#63;.
     * @param commerceNotificationTemplateUserSegmentRelId the primary key of the current commerce notification template user segment rel
     * @param commerceUserSegmentEntryId the commerce user segment entry ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the previous, current, and next commerce notification template user segment rel
     * @throws NoSuchNotificationTemplateUserSegmentRelException if a commerce notification template user segment rel with the primary key could not be found
     */
    @Override
    public CommerceNotificationTemplateUserSegmentRel[] findByCommerceUserSegmentEntryId_PrevAndNext(long commerceNotificationTemplateUserSegmentRelId, long commerceUserSegmentEntryId, OrderByComparator<CommerceNotificationTemplateUserSegmentRel> orderByComparator) throws NoSuchNotificationTemplateUserSegmentRelException {
        // Resolve the anchor row first; throws if the primary key does not exist.
        CommerceNotificationTemplateUserSegmentRel commerceNotificationTemplateUserSegmentRel = findByPrimaryKey(commerceNotificationTemplateUserSegmentRelId);
        Session session = null;
        try {
            session = openSession();
            CommerceNotificationTemplateUserSegmentRel[] array = new CommerceNotificationTemplateUserSegmentRelImpl[3];
            // array layout per the javadoc contract: [0] = previous, [1] = current, [2] = next.
            // The boolean flag selects the direction of the neighbor lookup.
            array[0] = getByCommerceUserSegmentEntryId_PrevAndNext(session, commerceNotificationTemplateUserSegmentRel, commerceUserSegmentEntryId, orderByComparator, true);
            array[1] = commerceNotificationTemplateUserSegmentRel;
            array[2] = getByCommerceUserSegmentEntryId_PrevAndNext(session, commerceNotificationTemplateUserSegmentRel, commerceUserSegmentEntryId, orderByComparator, false);
            return array;
        } catch (Exception e) {
            throw processException(e);
        } finally {
            // Always release the ORM session, even on failure.
            closeSession(session);
        }
    }
}
public class DisasterRecoveryConfigurationsInner {
    /**
     * Creates or updates a disaster recovery configuration.
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param disasterRecoveryConfigurationName The name of the disaster recovery configuration to be created/updated.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<ServiceResponse<DisasterRecoveryConfigurationInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String serverName, String disasterRecoveryConfigurationName) {
        // Fail fast on any missing required value before issuing the request.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (serverName == null) {
            throw new IllegalArgumentException("Parameter serverName is required and cannot be null.");
        }
        if (disasterRecoveryConfigurationName == null) {
            throw new IllegalArgumentException("Parameter disasterRecoveryConfigurationName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // Kick off the PUT, then let the Azure client poll the long-running
        // operation until it yields the final DisasterRecoveryConfigurationInner.
        Observable<Response<ResponseBody>> observable = service.createOrUpdate(this.client.subscriptionId(), resourceGroupName, serverName, disasterRecoveryConfigurationName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent());
        return client.getAzureClient().getPutOrPatchResultAsync(observable, new TypeToken<DisasterRecoveryConfigurationInner>() { }.getType());
    }
}
public class DefaultAsyncJobExecutor {
    /**
     * Starts the async executor
     * Idempotent: returns immediately when already active. Lazily creates the
     * acquisition runnables, starts the acquisition/reset threads, and finally
     * flips {@code isActive} before executing any queued temporary jobs.
     */
    public void start() {
        if (isActive) {
            return;
        }
        log.info("Starting up the default async job executor [{}].", getClass().getName());
        // Lazily create runnables so pre-configured instances are reused.
        if (timerJobRunnable == null) {
            timerJobRunnable = new AcquireTimerJobsRunnable(this, processEngineConfiguration.getJobManager());
        }
        if (resetExpiredJobsRunnable == null) {
            resetExpiredJobsRunnable = new ResetExpiredJobsRunnable(this);
        }
        // In message-queue mode async jobs arrive externally, so no local
        // acquisition runnable or execution thread pool is created.
        if (!isMessageQueueMode && asyncJobsDueRunnable == null) {
            asyncJobsDueRunnable = new AcquireAsyncJobsDueRunnable(this);
        }
        if (!isMessageQueueMode) {
            initAsyncJobExecutionThreadPool();
            startJobAcquisitionThread();
        }
        startTimerAcquisitionThread();
        startResetExpiredJobsThread();
        isActive = true;
        executeTemporaryJobs();
    }
}
public class WindowsJNIFaxClientSpi { /** * This function will submit a new fax job . < br > * The fax job ID will be returned by this method . * @ param serverName * The fax server name * @ param targetAddress * The fax job target address * @ param targetName * The fax job target name * @ param senderName * The fax job sender name * @ param fileName * The file to fax * @ param documentName * Document name * @ return The fax job ID ( null in case of an error ) */ private int winSubmitFaxJob ( String serverName , String targetAddress , String targetName , String senderName , String fileName , String documentName ) { } }
int faxJobID = 0 ; synchronized ( WindowsFaxClientSpiHelper . NATIVE_LOCK ) { // pre native call this . preNativeCall ( ) ; // invoke native faxJobID = WindowsJNIFaxClientSpi . submitFaxJobNative ( serverName , targetAddress , targetName , senderName , fileName , documentName ) ; } return faxJobID ;
public class ZookeeperMgr {
    /**
     * Writes a persistent node: creates it when absent, updates it when present.
     * (Translated from the original Chinese javadoc.)
     * @param url the node path to write
     * @param value the value to store at the node
     * @author liaoqiqi
     * @date 2013-6-14
     */
    public void writePersistentUrl(String url, String value) throws Exception {
        // Pure delegation; create-or-update semantics live in the underlying store.
        store.write(url, value);
    }
}
public class Searcher {
    /**
     * Searches a model for the given pattern, then collects the specified elements of the matches
     * and returns.
     * @param <T> BioPAX type
     * @param model model to search in
     * @param pattern pattern to search for
     * @param index index of the element in the match to collect
     * @param c type of the element to collect
     * @return set of the elements at the specified index of the matching results
     */
    public static <T extends BioPAXElement> Set<T> searchAndCollect(Model model, Pattern pattern, int index, Class<T> c) {
        // Seed the search with every model object of the pattern's starting class,
        // then delegate to the collection-based overload.
        return searchAndCollect(model.getObjects(pattern.getStartingClass()), pattern, index, c);
    }
}
public class EvolutionResume { /** * Run the evolution . */ private EvolutionResult < BitGene , Double > run ( final EvolutionResult < BitGene , Double > last , final AtomicBoolean proceed ) { } }
System . out . println ( "Starting evolution with existing result." ) ; return ( last != null ? ENGINE . stream ( last ) : ENGINE . stream ( ) ) . limit ( r -> proceed . get ( ) ) . collect ( EvolutionResult . toBestEvolutionResult ( ) ) ;
public class ExecutionConfig { /** * Returns the registered Kryo types . */ public LinkedHashSet < Class < ? > > getRegisteredKryoTypes ( ) { } }
if ( isForceKryoEnabled ( ) ) { // if we force kryo , we must also return all the types that // were previously only registered as POJO LinkedHashSet < Class < ? > > result = new LinkedHashSet < > ( ) ; result . addAll ( registeredKryoTypes ) ; for ( Class < ? > t : registeredPojoTypes ) { if ( ! result . contains ( t ) ) { result . add ( t ) ; } } return result ; } else { return registeredKryoTypes ; }
public class PvmExecutionImpl {
    /**
     * Delays and stores the given DelayedVariableEvent on the process instance.
     * @param delayedVariableEvent the DelayedVariableEvent which should be store on the process instance
     */
    public void delayEvent(DelayedVariableEvent delayedVariableEvent) {
        // if process definition has no conditional events the variable events does not have to be delayed
        Boolean hasConditionalEvents = this.getProcessDefinition().getProperties().get(BpmnProperties.HAS_CONDITIONAL_EVENTS);
        // null-safe equivalent of "null or not TRUE".
        if (!Boolean.TRUE.equals(hasConditionalEvents)) {
            return;
        }
        // Delayed events are always collected on the process-instance execution.
        if (isProcessInstanceExecution()) {
            delayedEvents.add(delayedVariableEvent);
        } else {
            getProcessInstance().delayEvent(delayedVariableEvent);
        }
    }
}
public class HBlinkImageView {
    /**
     * display this field in html input format.
     * NOTE(review): {@code strImage} is always empty here, so this emits an empty
     * table cell — presumably a stub or meant to be overridden; confirm intent.
     * @param out The html out stream.
     * @param strFieldDesc The field description.
     * @param strFieldName The field name.
     * @param strSize The control size.
     * @param strMaxSize The string max size.
     * @param strValue The default value.
     * @param strControlType The control type.
     * @param iHtmlAttributes The attributes.
     */
    public void printInputControl(PrintWriter out, String strFieldDesc, String strFieldName, String strSize, String strMaxSize, String strValue, String strControlType, int iHtmlAttributes) {
        String strImage = "";
        out.println("<td>" + strImage + "</td>");
    }
}
public class OutgoingTupleCollection {
    /**
     * Clean the internal state of OutgoingTupleCollection
     * Thread-safe: all state is dropped while holding {@code lock}.
     */
    public void clear() {
        lock.lock();
        try {
            currentControlTuple = null;
            currentDataTuple = null;
            outQueue.clear();
        } finally {
            // Release the lock even if clearing throws.
            lock.unlock();
        }
    }
}
public class rnat { /** * Use this API to clear rnat . */ public static base_response clear ( nitro_service client , rnat resource ) throws Exception { } }
rnat clearresource = new rnat ( ) ; clearresource . network = resource . network ; clearresource . netmask = resource . netmask ; clearresource . aclname = resource . aclname ; clearresource . redirectport = resource . redirectport ; clearresource . natip = resource . natip ; clearresource . td = resource . td ; return clearresource . perform_operation ( client , "clear" ) ;
public class MethodInvocationProcessor {
    /**
     * ---- PortableObject implementation -----
     * Reads this processor's state from a POF stream. The property indexes must
     * stay in sync with the corresponding writeExternal implementation.
     */
    @Override
    public void readExternal(PofReader reader) throws IOException {
        name = reader.readString(0);                      // invocation target name
        mutator = reader.readBoolean(1);                  // whether the invocation mutates state
        args = reader.readObjectArray(2, new Object[0]);  // invocation arguments (empty default)
    }
}
public class AfplibPackageImpl {
    /**
     * Returns the EClass for CPC, lazily resolving it from the globally
     * registered package on first access.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getCPC() {
        if (cpcEClass == null) {
            // Index 228 is the generated position of CPC in the package's classifier list.
            cpcEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(228);
        }
        return cpcEClass;
    }
}
public class server_servicegroup_binding { /** * Use this API to fetch server _ servicegroup _ binding resources of given name . */ public static server_servicegroup_binding [ ] get ( nitro_service service , String name ) throws Exception { } }
server_servicegroup_binding obj = new server_servicegroup_binding ( ) ; obj . set_name ( name ) ; server_servicegroup_binding response [ ] = ( server_servicegroup_binding [ ] ) obj . get_resources ( service ) ; return response ;
public class Annotate {
    /**
     * Queue processing of an attribute default value.
     * The work is deferred: the default value is entered during the "normal"
     * phase and its annotations are validated during the "validate" phase, each
     * time with the log's source file (and, for entry, the deferred-lint
     * position) temporarily switched to the declaring method's context.
     */
    public void annotateDefaultValueLater(JCExpression defaultValue, Env<AttrContext> localEnv, MethodSymbol m, DiagnosticPosition deferPos) {
        normal(() -> {
            JavaFileObject prev = log.useSource(localEnv.toplevel.sourcefile);
            DiagnosticPosition prevLintPos = deferredLintHandler.setPos(deferPos);
            try {
                enterDefaultValue(defaultValue, localEnv, m);
            } finally {
                // Always restore lint position and source, even if entry fails.
                deferredLintHandler.setPos(prevLintPos);
                log.useSource(prev);
            }
        });
        validate(() -> { // validate annotations
            JavaFileObject prev = log.useSource(localEnv.toplevel.sourcefile);
            try {
                // if default value is an annotation, check it is a well-formed
                // annotation value (e.g. no duplicate values, no missing values, etc.)
                chk.validateAnnotationTree(defaultValue);
            } finally {
                log.useSource(prev);
            }
        });
    }
}
public class Tree {
    /**
     * Prepare the Tree for rendering.
     * Resolves (or builds from the tag body) the tree root, optionally registers
     * the tree with the NameService for client-side operation, processes any
     * pending expand/collapse request, emits the client-side script support, and
     * finally renders the tree inside a containing &lt;div&gt;.
     * @throws JspException if a JSP exception has occurred
     */
    public void doTag() throws JspException, IOException {
        if (hasErrors()) {
            reportErrors();
            return;
        }
        // See if there is a TreeRoot already defined.
        _expr = new ExpressionHandling(this);
        TreeElement root = null;
        try {
            root = getTreeRoot(_expr);
        } catch (IllegalExpressionException iee) {
            String s = Bundle.getString("TreeRootError", new Object[]{_dataSource, iee.getMessage()});
            registerTagError(s, null);
            return;
        }
        if (hasErrors()) {
            reportErrors();
            return;
        }
        // If we don't have a root element, then we need to create it by processing the body which contains
        // <TreeItems> tags that define the tree.
        if (root == null) {
            getBufferBody(false);
            // check to see if we should exit due to an error occurring;
            // write out any error text and return.
            if (_errorText != null) {
                write(_errorText.toString());
                if (hasErrors()) reportErrors();
            }
        }
        // Set the image Root if it is not set.
        PageContext pageContext = getPageContext();
        if (_iState.getImageRoot() == null)
            _iState.setImageRoot(((HttpServletRequest) pageContext.getRequest()).getContextPath() + "/" + TagConfig.getTreeImageLocation());
        // errors should have been caught above
        TreeElement treeRoot = getTreeRoot(_expr);
        // if the tree root hasn't been defined, then we need to update the object that is
        // pointed at by the dataSource expression.
        if (treeRoot == null) {
            if (_rootNode != null) {
                try {
                    String datasource = "{" + _dataSource + "}";
                    _expr.updateExpression(datasource, _rootNode, pageContext);
                } catch (ExpressionUpdateException e) {
                    String s = Bundle.getString("Tags_UnableToWriteTree", new Object[]{_dataSource, e.getMessage()});
                    registerTagError(s, null);
                    reportErrors();
                    return;
                }
                treeRoot = _rootNode;
            }
            // indicate an update error and return
            if (treeRoot == null) {
                String s = Bundle.getString("Tags_TreeNoAttribute", _dataSource);
                registerTagError(s, null);
                reportErrors();
                return;
            }
        }
        // set the root image
        if (treeRoot instanceof ITreeRootElement) {
            ITreeRootElement tre = (ITreeRootElement) treeRoot;
            if (tre.getRootNodeExpandedImage() == null) {
                tre.setRootNodeExpandedImage((_rootNodeExpandedImage != null) ? _rootNodeExpandedImage : InheritableState.IMAGE_ROOT_EXPANDED);
            }
            if (tre.getRootNodeCollapsedImage() == null) {
                tre.setRootNodeCollapsedImage((_rootNodeCollapsedImage != null) ? _rootNodeCollapsedImage : InheritableState.IMAGE_ROOT_COLLAPSED);
            }
        }
        // if we are running the tree at the client, then
        // we need to register the tree with the NameService
        if (_trs.runAtClient) {
            // it's currently not legal to have a runAtClient but not be an instance of INameable which is
            // implemented by the ITreeRootElement.
            if (!(treeRoot instanceof INameable)) {
                String s = Bundle.getString("Tags_TreeRunAtClientRoot", null);
                registerTagError(s, null);
                reportErrors();
                return;
            }
            // name the tree if it hasn't been named already
            // or if no longer stored in the NameService, add it.
            INameable in = (INameable) treeRoot;
            String o = in.getObjectName();
            NameService ns = NameService.instance(pageContext.getSession());
            if (o == null) {
                ns.nameObject("Tree", in);
                ns.put(in);
            } else if (ns.get(o) == null) {
                ns.put(in);
            }
        }
        // prepare to render the tree
        HttpServletRequest request = (HttpServletRequest) pageContext.getRequest();
        HttpServletResponse response = (HttpServletResponse) pageContext.getResponse();
        InternalStringBuilder sb = new InternalStringBuilder(1024);
        StringBuilderRenderAppender writer = new StringBuilderRenderAppender(sb);
        // this is the treeId from the request. If there was a tree expansion this will be
        // non-null and it identifies what tree had the expansion request.
        // we need to qualify the tree based upon the tagId
        assert (_trs.tagId != null);
        _trs.tagId = getIdForTagId(_trs.tagId);
        String treeId = request.getParameter(TreeElement.TREE_ID);
        if (treeId != null && _trs.tagId.equals(treeId)) {
            TreeHelpers.processTreeRequest(treeId, treeRoot, request, response);
        }
        // check for the nodes that are expanded...
        // Add the script support for the tree.
        if (_trs.runAtClient) {
            IScriptReporter sr = getScriptReporter();
            if (sr == null) {
                String s = Bundle.getString("Tags_TreeRunAtClientSC", null);
                registerTagError(s, null);
                reportErrors();
                return;
            }
            ScriptRequestState srs = ScriptRequestState.getScriptRequestState(request);
            if (!srs.isFeatureWritten(CoreScriptFeature.DYNAMIC_INIT)) {
                String s = Bundle.getString("Tags_TreeHtmlRunAtClient", null);
                registerTagError(s, null);
                reportErrors();
                return;
            }
            assert (treeRoot instanceof ITreeRootElement);
            ITreeRootElement tre = (ITreeRootElement) treeRoot;
            // Arguments for the client-side tree initialization script.
            Object[] args = new Object[8];
            args[0] = _iState.getImageRoot() + "/";
            args[1] = tre.getObjectName();
            args[2] = _iState.getNodeCollapsedImage();
            args[3] = _iState.getNodeExpandedImage();
            args[4] = _iState.getLastNodeCollapsedImage();
            args[5] = _iState.getLastNodeExpandedImage();
            args[6] = Bundle.getString("Tags_TreeAltTextExpand", null);
            args[7] = Bundle.getString("Tags_TreeAltTextCollapse", null);
            srs.writeFeature(sr, writer, CoreScriptFeature.TREE_INIT, false, false, args);
            AjaxUrlInfo ajaxInfo = URLRewriterService.getAjaxUrl(pageContext.getServletContext(), request, treeRoot);
            if (ajaxInfo.getCommandPrefix() != null) {
                args = new Object[2];
                args[0] = tre.getObjectName();
                args[1] = ajaxInfo.getCommandPrefix();
                srs.writeFeature(sr, writer, CoreScriptFeature.AJAX_PREFIX, false, false, args);
            }
            if (ajaxInfo.getAjaxParameter() != null) {
                args = new Object[2];
                args[0] = tre.getObjectName();
                args[1] = ajaxInfo.getAjaxParameter();
                srs.writeFeature(sr, writer, CoreScriptFeature.AJAX_PARAM, false, false, args);
            }
            tre.setTreeRenderState(_trs);
            tre.setInheritableState(_iState);
        }
        // create a containing tree level <div> and place the tree level styles on it.
        _divState.styleClass = _treeStyleClass;
        _divState.style = _treeStyle;
        String divId = null;
        if (_renderTagIdLookup) {
            _divState.id = _trs.tagId;
            divId = _divState.id;
        }
        // if we are running on the client then we need to output the tree name into the top level tree <div> tag
        if (_trs.runAtClient) {
            _divState.registerAttribute(AbstractHtmlState.ATTR_GENERAL, "netui:treeName", ((INameable) treeRoot).getObjectName());
        }
        TagRenderingBase divRenderer = TagRenderingBase.Factory.getRendering(TagRenderingBase.DIV_TAG, request);
        divRenderer.doStartTag(writer, _divState);
        sb.append("\n");
        // Render the tree.
        AttributeRenderer extraAttrs = new AttributeRenderer();
        String treeRendererClassName = TagConfig.getTreeRendererClassName();
        TreeRenderer tr = TreeRendererFactory.getInstance(treeRendererClassName);
        if (tr == null) {
            tr = new TreeRenderer();
        }
        tr.init(_trs, request, response, pageContext.getServletContext());
        tr.setTreeRenderSupport(new TagTreeRenderSupport(this));
        tr.render(writer, treeRoot, 0, extraAttrs, _iState);
        if (hasErrors()) {
            reportErrors();
            return;
        }
        // finish the tree representation and write it
        divRenderer.doEndTag(writer);
        sb.append("\n");
        write(sb.toString());
        // Root-image attributes are only meaningful on an ITreeRootElement;
        // flag their use on any other root type.
        if (!(treeRoot instanceof ITreeRootElement)) {
            boolean error = false;
            if (_rootNodeExpandedImage != null) {
                String s = Bundle.getString("Tags_TreeRootImageError", null);
                registerTagError(s, null);
                error = true;
            }
            if (_rootNodeCollapsedImage != null) {
                String s = Bundle.getString("Tags_TreeRootImageError", null);
                registerTagError(s, null);
                error = true;
            }
            if (error) {
                reportErrors();
            }
        }
        // check to see if we are writing out the JavaScript lookup support.
        if (_renderTagIdLookup) {
            String jsOut = renderDefaultJavaScript(request, divId);
            if (jsOut != null) write(jsOut);
        }
    }
}
public class CoreJBossASClient { /** * Adds a new module extension to the core system . * @ param name the name of the new module extension * @ throws Exception any error */ public void addExtension ( String name ) throws Exception { } }
// / extension = < name > / : add ( module = < name > ) final ModelNode request = createRequest ( ADD , Address . root ( ) . add ( EXTENSION , name ) ) ; request . get ( MODULE ) . set ( name ) ; final ModelNode response = execute ( request ) ; if ( ! isSuccess ( response ) ) { throw new FailureException ( response , "Failed to add new module extension [" + name + "]" ) ; } return ;
public class RangeMassDecomposer {
    /**
     * Check if a mass is decomposable. This is done in constant time (especially: it is very very very fast!).
     * But it doesn't check if there is a valid decomposition. Therefore, even if the method returns true,
     * all decompositions may be invalid for the given validator or given bounds.
     * #decompose(mass) uses this function before starting the decomposition, therefore this method should only
     * be used if you don't want to start the decomposition algorithm.
     * @return true if the mass is decomposable, ignoring bounds or any additional filtering rule
     */
    boolean maybeDecomposable(double from, double to) {
        // Make sure the residue tables are built before consulting them.
        init();
        final int[][][] ERTs = this.ERTs;
        final int[] minmax = new int[2];
        // normal version seems to be faster, because it returns after first hit
        integerBound(from, to, minmax);
        final int a = weights.get(0).getIntegerMass();
        // For each integer mass in [minmax[0], minmax[1]], check it against the
        // residue-table threshold for its residue class modulo the smallest weight.
        for (int i = minmax[0]; i <= minmax[1]; ++i) {
            final int r = i % a;
            if (i >= ERTs[0][r][weights.size() - 1]) return true;
        }
        return false;
    }
}
public class CellRepeater { /** * Get the metadata for the current item . This method is not supported by * this tag . * @ throws UnsupportedOperationException this tag does not support this method from the IDataAccessProvider interface * @ see org . apache . beehive . netui . script . common . IDataAccessProvider */ public Object getCurrentMetadata ( ) { } }
LocalizedUnsupportedOperationException uoe = new LocalizedUnsupportedOperationException ( "The " + getTagName ( ) + "does not export metadata for its iterated items." ) ; uoe . setLocalizedMessage ( Bundle . getErrorString ( "Tags_DataAccessProvider_metadataUnsupported" , new Object [ ] { getTagName ( ) } ) ) ; throw uoe ;
public class TemplateMetadata {
    /**
     * Builds a Template from a parsed TemplateNode.
     * Copies the node's common attributes into a builder, then fills in the
     * kind-specific fields (and, for deltemplates, the delegate name/variant)
     * based on the node's concrete kind.
     */
    public static TemplateMetadata fromTemplate(TemplateNode template) {
        TemplateMetadata.Builder builder = builder()
            .setTemplateName(template.getTemplateName())
            .setSourceLocation(template.getSourceLocation())
            .setSoyFileKind(SoyFileKind.SRC)
            .setContentKind(template.getContentKind())
            .setStrictHtml(template.isStrictHtml())
            .setDelPackageName(template.getDelPackageName())
            .setVisibility(template.getVisibility())
            .setParameters(Parameter.directParametersFromTemplate(template))
            .setDataAllCallSituations(DataAllCallSituation.fromTemplate(template));
        switch (template.getKind()) {
            case TEMPLATE_BASIC_NODE:
                builder.setTemplateKind(Kind.BASIC);
                break;
            case TEMPLATE_DELEGATE_NODE:
                builder.setTemplateKind(Kind.DELTEMPLATE);
                // Deltemplates additionally carry a delegate name and variant.
                TemplateDelegateNode deltemplate = (TemplateDelegateNode) template;
                builder.setDelTemplateName(deltemplate.getDelTemplateName());
                builder.setDelTemplateVariant(deltemplate.getDelTemplateVariant());
                break;
            case TEMPLATE_ELEMENT_NODE:
                builder.setTemplateKind(Kind.ELEMENT);
                break;
            default:
                throw new AssertionError("unexpected template kind: " + template.getKind());
        }
        return builder.build();
    }
}
public class Stream {
    /**
     * ## Repartitioning Operation
     * Repartitions the stream with a custom grouping: the partitioner is
     * Java-serialized and wrapped in a Grouping before delegating to the
     * Grouping-based overload.
     * @param partitioner the custom stream grouping to apply
     * @return the repartitioned stream
     */
    public Stream partition(CustomStreamGrouping partitioner) {
        return partition(Grouping.custom_serialized(Utils.javaSerialize(partitioner)));
    }
}
public class ImageIOGreyScale { /** * Returns an < code > ImageOutputStream < / code > that will send its output to the given < code > Object < / code > . * The set of < code > ImageOutputStreamSpi < / code > s registered with the < code > IIORegistry < / code > class is * queried and the first one that is able to send output from the supplied object is used to create the * returned < code > ImageOutputStream < / code > . If no suitable < code > ImageOutputStreamSpi < / code > exists , * < code > null < / code > is returned . * The current cache settings from < code > getUseCache < / code > and < code > getCacheDirectory < / code > will be used * to control caching . * @ param output * an < code > Object < / code > to be used as an output destination , such as a < code > File < / code > , * writable < code > RandomAccessFile < / code > , or < code > OutputStream < / code > . * @ return an < code > ImageOutputStream < / code > , or < code > null < / code > . * @ exception IllegalArgumentException * if < code > output < / code > is < code > null < / code > . * @ exception IOException * if a cache file is needed but cannot be created . * @ see javax . imageio . spi . ImageOutputStreamSpi */ public static ImageOutputStream createImageOutputStream ( Object output ) throws IOException { } }
if ( output == null ) { throw new IllegalArgumentException ( "output == null!" ) ; } Iterator iter ; // Ensure category is present try { iter = theRegistry . getServiceProviders ( ImageOutputStreamSpi . class , true ) ; } catch ( IllegalArgumentException e ) { return null ; } boolean usecache = getUseCache ( ) && hasCachePermission ( ) ; while ( iter . hasNext ( ) ) { ImageOutputStreamSpi spi = ( ImageOutputStreamSpi ) iter . next ( ) ; if ( spi . getOutputClass ( ) . isInstance ( output ) ) { try { return spi . createOutputStreamInstance ( output , usecache , getCacheDirectory ( ) ) ; } catch ( IOException e ) { throw new IIOException ( "Can't create cache file!" , e ) ; } } } return null ;
public class Lens {
    /**
     * Called when a capture is triggered but not yet saved to a {@link File}, enabling additional
     * processing before saving. The default implementation immediately calls the {@code listener}
     * with the original screenshot.
     * @param screenshot A reference to the screenshot that was captured. Can be null if screenshots
     * were disabled.
     * @param listener callback for when additional processing has been completed. This listener must
     * be called for the screenshot to be saved to disk.
     */
    public void onCapture(@Nullable Bitmap screenshot, @NonNull BitmapProcessorListener listener) {
        // Default behavior: no extra processing — hand the raw screenshot straight back.
        listener.onBitmapReady(screenshot);
    }
}
public class CommandLineIndicatorRunner { /** * Creates a list with the available indicators ( but setCoverage ) * @ param referenceFront * @ return * @ throws FileNotFoundException */ private static List < QualityIndicator < List < PointSolution > , Double > > getAvailableIndicators ( Front referenceFront ) throws FileNotFoundException { } }
List < QualityIndicator < List < PointSolution > , Double > > list = new ArrayList < > ( ) ; list . add ( new Epsilon < PointSolution > ( referenceFront ) ) ; list . add ( new PISAHypervolume < PointSolution > ( referenceFront ) ) ; list . add ( new GenerationalDistance < PointSolution > ( referenceFront ) ) ; list . add ( new InvertedGenerationalDistance < PointSolution > ( referenceFront ) ) ; list . add ( new InvertedGenerationalDistancePlus < PointSolution > ( referenceFront ) ) ; list . add ( new Spread < PointSolution > ( referenceFront ) ) ; list . add ( new GeneralizedSpread < PointSolution > ( referenceFront ) ) ; // list . add ( new R2 < List < DoubleSolution > > ( referenceFront ) ) ; list . add ( new ErrorRatio < List < PointSolution > > ( referenceFront ) ) ; return list ;
public class ReflectionUtils { /** * Checks if the class is an integer type , i . e . , is numeric but not a floating point type . * @ param type the class we want to check * @ return true if the type is an integral type */ public static boolean isIntegerType ( final Class type ) { } }
return Arrays . < Class > asList ( Integer . class , int . class , Long . class , long . class , Short . class , short . class , Byte . class , byte . class ) . contains ( type ) ;
public class StackTraceHelper {
    /**
     * Get the stack trace of a throwable as string.
     * @param t The throwable to be converted. May be <code>null</code>.
     * @param bOmitCommonStackTraceElements If <code>true</code> the stack trace is cut after certain class
     *        names occurring. If <code>false</code> the complete stack trace is returned.
     * @return the stack trace as newline separated string. If the passed
     *         Throwable is <code>null</code> an empty string is returned.
     */
    @Nonnull
    public static String getStackAsString(@Nullable final Throwable t, final boolean bOmitCommonStackTraceElements) {
        if (t == null)
            return "";

        // convert call stack to string
        final StringBuilder aCallStack = _getRecursiveStackAsStringBuilder(t, null, null, 1, bOmitCommonStackTraceElements);

        // avoid having a separator at the end -> remove the last char
        if (StringHelper.getLastChar(aCallStack) == STACKELEMENT_LINESEP)
            aCallStack.deleteCharAt(aCallStack.length() - 1);

        return aCallStack.toString();
    }
}
public class CmsAlertDialog {
    /**
     * Adds a widget to this dialogs bottom content.<p>
     * @param w the widget to add
     */
    public void addBottomWidget(Widget w) {
        // Switch the main content away from the plain alert layout once bottom content exists.
        m_content.removeStyleName(I_CmsLayoutBundle.INSTANCE.dialogCss().alertMainContent());
        // Make the bottom widget container visible (clears any inline display setting).
        m_bottomWidgets.getElement().getStyle().clearDisplay();
        m_bottomWidgets.add(w);
    }
}
public class JavacParser {
    /**
     * If next input token matches given token, skip it, otherwise report
     * an error.
     */
    public void accept(TokenKind tk) {
        if (token.kind == tk) {
            nextToken();
        } else {
            // Anchor the error at the end of the previous token so the diagnostic
            // points at the position where the expected token should have appeared.
            setErrorEndPos(token.pos);
            reportSyntaxError(S.prevToken().endPos, "expected", tk);
        }
    }
}
public class BasicRecordStoreLoader { /** * Invokes an operation to put the provided key - value pairs to the partition * record store . * @ param keyValueSequence the list of serialised alternating key - value pairs * @ return the future representing the pending completion of the put operation */ private Future < ? > sendOperation ( List < Data > keyValueSequence ) { } }
OperationService operationService = mapServiceContext . getNodeEngine ( ) . getOperationService ( ) ; Operation operation = createOperation ( keyValueSequence ) ; return operationService . invokeOnPartition ( MapService . SERVICE_NAME , operation , partitionId ) ;
public class SnapshotStore { /** * Creates a disk snapshot . */ private Snapshot createDiskSnapshot ( SnapshotDescriptor descriptor ) { } }
SnapshotFile file = new SnapshotFile ( SnapshotFile . createSnapshotFile ( storage . directory ( ) , storage . prefix ( ) , descriptor . index ( ) ) ) ; Snapshot snapshot = new FileSnapshot ( file , descriptor , this ) ; log . debug ( "Created disk snapshot: {}" , snapshot ) ; return snapshot ;
public class DatastreamResolverServlet { /** * Processes the servlet request and resolves the physical location of the * specified datastream . * @ param request * The servlet request . * @ param response * servlet The servlet response . * @ throws ServletException * If an error occurs that effects the servlet ' s basic operation . * @ throws IOException * If an error occurrs with an input or output operation . */ @ Override public void doGet ( HttpServletRequest request , HttpServletResponse response ) throws ServletException , IOException { } }
String id = null ; String dsPhysicalLocation = null ; String dsControlGroupType = null ; MIMETypedStream mimeTypedStream = null ; DisseminationService ds = null ; Timestamp keyTimestamp = null ; Timestamp currentTimestamp = null ; PrintWriter out = null ; ServletOutputStream outStream = null ; id = request . getParameter ( "id" ) . replace ( 'T' , ' ' ) ; logger . debug ( "Datastream tempID={}" , id ) ; logger . debug ( "DRS doGet()" ) ; try { // Check for required id parameter . if ( id == null || id . equalsIgnoreCase ( "" ) ) { String message = "[DatastreamResolverServlet] No datastream ID " + "specified in servlet request: " + request . getRequestURI ( ) ; logger . error ( message ) ; response . setStatus ( HttpServletResponse . SC_INTERNAL_SERVER_ERROR ) ; response . sendError ( HttpServletResponse . SC_INTERNAL_SERVER_ERROR , message ) ; return ; } id = id . replace ( 'T' , ' ' ) . replaceAll ( "/" , "" ) . trim ( ) ; // Get in - memory hashtable of mappings from Fedora server . ds = new DisseminationService ( m_server ) ; DatastreamMediation dm = DisseminationService . dsRegistry . get ( id ) ; if ( dm == null ) { StringBuffer entries = new StringBuffer ( ) ; Iterator < String > eIter = DisseminationService . dsRegistry . keySet ( ) . iterator ( ) ; while ( eIter . hasNext ( ) ) { entries . append ( "'" + eIter . next ( ) + "' " ) ; } throw new IOException ( "Cannot find datastream in temp registry by key: " + id + "\n" + "Reg entries: " + entries . toString ( ) ) ; } dsPhysicalLocation = dm . dsLocation ; dsControlGroupType = dm . dsControlGroupType ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( "**************************** DatastreamResolverServlet dm.dsLocation: {}" , dm . dsLocation ) ; logger . debug ( "**************************** DatastreamResolverServlet dm.dsControlGroupType: {}" , dm . dsControlGroupType ) ; logger . debug ( "**************************** DatastreamResolverServlet dm.callUsername: {}" , dm . callUsername ) ; logger . 
debug ( "**************************** DatastreamResolverServlet dm.Password: {}" , dm . callPassword ) ; logger . debug ( "**************************** DatastreamResolverServlet dm.callbackRole: {}" , dm . callbackRole ) ; logger . debug ( "**************************** DatastreamResolverServlet dm.callbackBasicAuth: {}" , dm . callbackBasicAuth ) ; logger . debug ( "**************************** DatastreamResolverServlet dm.callBasicAuth: {}" , dm . callBasicAuth ) ; logger . debug ( "**************************** DatastreamResolverServlet dm.callbackSSl: {}" , dm . callbackSSL ) ; logger . debug ( "**************************** DatastreamResolverServlet dm.callSSl: {}" , dm . callSSL ) ; logger . debug ( "**************************** DatastreamResolverServlet non ssl port: {}" , fedoraServerPort ) ; logger . debug ( "**************************** DatastreamResolverServlet ssl port: {}" , fedoraServerRedirectPort ) ; } // DatastreamResolverServlet maps to two distinct servlet mappings // in fedora web . xml . // getDS - is used when the backend service is incapable of // basicAuth or SSL // getDSAuthenticated - is used when the backend service has // basicAuth and SSL enabled // Since both the getDS and getDSAuthenticated servlet targets map // to the same servlet // code and the Context used to initialize policy enforcement is // based on the incoming // HTTPRequest , the code must provide special handling for requests // using the getDS // target . When the incoming URL to DatastreamResolverServlet // contains the getDS target , // there are several conditions that must be checked to insure that // the correct role is // assigned to the request before policy enforcement occurs . // 1 ) if the mapped dsPhysicalLocation of the request is actually a // callback to the // Fedora server itself , then assign the role as // BACKEND _ SERVICE _ CALL _ UNSECURE so // the basicAuth and SSL constraints will match those of the getDS // target . 
// 2 ) if the mapped dsPhysicalLocation of the request is actually a // Managed Content // or Inline XML Content datastream , then assign the role as // BACKEND _ SERVICE _ CALL _ UNSECURE so // the basicAuth and SSL constraints will match the getDS target . // 3 ) Otherwise , leave the targetrole unchanged . if ( request . getRequestURI ( ) . endsWith ( "getDS" ) && ( ServerUtility . isURLFedoraServer ( dsPhysicalLocation ) || dsControlGroupType . equals ( "M" ) || dsControlGroupType . equals ( "X" ) ) ) { if ( logger . isDebugEnabled ( ) ) { logger . debug ( "*********************** Changed role from: " + dm . callbackRole + " to: " + BackendPolicies . BACKEND_SERVICE_CALL_UNSECURE ) ; } dm . callbackRole = BackendPolicies . BACKEND_SERVICE_CALL_UNSECURE ; } // If callback is to fedora server itself and callback is over SSL , // adjust the protocol and port // on the URL to match settings of Fedora server . This is necessary // since the SSL settings for the // backend service may have specified basicAuth = false , but contained // datastreams that are callbacks // to the local Fedora server which requires SSL . The version of // HttpClient currently in use does // not handle autoredirecting from http to https so it is necessary // to set the protocol and port // to the appropriate secure port . if ( dm . callbackRole . equals ( BackendPolicies . FEDORA_INTERNAL_CALL ) ) { if ( dm . callbackSSL ) { dsPhysicalLocation = dsPhysicalLocation . replaceFirst ( "http:" , "https:" ) ; dsPhysicalLocation = dsPhysicalLocation . replaceFirst ( fedoraServerPort , fedoraServerRedirectPort ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( "*********************** DatastreamResolverServlet -- Was Fedora-to-Fedora call -- modified dsPhysicalLocation: " + dsPhysicalLocation ) ; } } } keyTimestamp = Timestamp . valueOf ( ds . extractTimestamp ( id ) ) ; currentTimestamp = new Timestamp ( new Date ( ) . getTime ( ) ) ; logger . 
debug ( "dsPhysicalLocation={} dsControlGroupType={}" , dsPhysicalLocation , dsControlGroupType ) ; // Deny mechanism requests that fall outside the specified time // interval . // The expiration limit can be adjusted using the Fedora config // parameter // named " datastreamMediationLimit " which is in milliseconds . long diff = currentTimestamp . getTime ( ) - keyTimestamp . getTime ( ) ; logger . debug ( "Timestamp diff for mechanism's reponse: {} ms." , diff ) ; if ( diff > datastreamMediationLimit ) { out = response . getWriter ( ) ; response . setContentType ( HTML_CONTENT_TYPE ) ; out . println ( "<br><b>[DatastreamResolverServlet] Error:</b>" + "<font color=\"red\"> Deployment has failed to respond " + "to the DatastreamResolverServlet within the specified " + "time limit of \"" + datastreamMediationLimit + "\"" + "milliseconds. Datastream access denied." ) ; logger . error ( "Deployment failed to respond to " + "DatastreamResolverServlet within time limit of " + datastreamMediationLimit ) ; out . close ( ) ; return ; } if ( dm . callbackRole == null ) { throw new AuthzOperationalException ( "no callbackRole for this ticket" ) ; } String targetRole = // Authorization . FEDORA _ ROLE _ KEY + " = " + dm . callbackRole ; // restrict access to role of this // ticket String [ ] targetRoles = { targetRole } ; Context context = ReadOnlyContext . getContext ( Constants . HTTP_REQUEST . REST . uri , request ) ; // , targetRoles ) ; if ( request . getRemoteUser ( ) == null ) { // non - authn : must accept target role of ticket logger . debug ( "DatastreamResolverServlet: unAuthenticated request" ) ; } else { // authn : check user roles for target role of ticket /* * logger . debug ( " DatastreamResolverServlet : Authenticated request * getting user " ) ; String [ ] roles = null ; Principal principal = * request . getUserPrincipal ( ) ; if ( principal = = null ) { / / no * principal to grok roles from ! ! } else { try { roles = * ReadOnlyContext . 
getRoles ( principal ) ; } catch ( Throwable t ) { } } * if ( roles = = null ) { roles = EMPTY _ STRING _ ARRAY ; } */ // XXXXXxif ( contains ( roles , targetRole ) ) { logger . debug ( "DatastreamResolverServlet: user=={}" , request . getRemoteUser ( ) ) ; /* * if * ( ( ( ExtendedHttpServletRequest ) request ) . isUserInRole ( targetRole ) ) { * logger . debug ( " DatastreamResolverServlet : user has required * role " ) ; } else { logger . debug ( " DatastreamResolverServlet : authZ * exception in validating user " ) ; throw new * AuthzDeniedException ( " wrong user for this ticket " ) ; } */ } if ( logger . isDebugEnabled ( ) ) { logger . debug ( "debugging backendService role" ) ; logger . debug ( "targetRole=" + targetRole ) ; int targetRolesLength = targetRoles . length ; logger . debug ( "targetRolesLength=" + targetRolesLength ) ; if ( targetRolesLength > 0 ) { logger . debug ( "targetRoles[0]=" + targetRoles [ 0 ] ) ; } int nSubjectValues = context . nSubjectValues ( targetRole ) ; logger . debug ( "nSubjectValues=" + nSubjectValues ) ; if ( nSubjectValues > 0 ) { logger . debug ( "context.getSubjectValue(targetRole)=" + context . getSubjectValue ( targetRole ) ) ; } Iterator < String > subjectNames = context . subjectAttributes ( ) ; while ( subjectNames . hasNext ( ) ) { String name = subjectNames . next ( ) ; int n = context . nSubjectValues ( name ) ; switch ( n ) { case 0 : logger . debug ( "no subject attributes for " + name ) ; break ; case 1 : String value = context . getSubjectValue ( name ) ; logger . debug ( "single subject attributes for " + name + "=" + value ) ; break ; default : String [ ] values = context . getSubjectValues ( name ) ; for ( String element : values ) { logger . debug ( "another subject attribute from context " + name + "=" + element ) ; } } } Iterator < URI > it = context . environmentAttributes ( ) ; while ( it . hasNext ( ) ) { URI name = it . next ( ) ; String value = context . getEnvironmentValue ( name ) ; logger . 
debug ( "another environment attribute from context " + name + "=" + value ) ; } } /* * / / Enforcement of Backend Security is temporarily disabled * pending refactoring . / / logger . debug ( " DatastreamResolverServlet : * about to do final authZ check " ) ; Authorization authorization = * ( Authorization ) s _ server * . getModule ( " org . fcrepo . server . security . Authorization " ) ; * authorization . enforceResolveDatastream ( context , keyTimestamp ) ; * logger . debug ( " DatastreamResolverServlet : final authZ check * suceeded . . . . . " ) ; */ if ( dsControlGroupType . equalsIgnoreCase ( "E" ) ) { // testing to see what ' s in request header that might be of // interest if ( logger . isDebugEnabled ( ) ) { for ( @ SuppressWarnings ( "unchecked" ) Enumeration < String > e = request . getHeaderNames ( ) ; e . hasMoreElements ( ) ; ) { String name = e . nextElement ( ) ; @ SuppressWarnings ( "unchecked" ) Enumeration < String > headerValues = request . getHeaders ( name ) ; StringBuffer sb = new StringBuffer ( ) ; while ( headerValues . hasMoreElements ( ) ) { sb . append ( headerValues . nextElement ( ) ) ; } String value = sb . toString ( ) ; logger . debug ( "DATASTREAMRESOLVERSERVLET REQUEST HEADER CONTAINED: {} : {}" , name , value ) ; } } // Datastream is ReferencedExternalContent so dsLocation is a // URL string ExternalContentManager externalContentManager = ( ExternalContentManager ) m_server . getModule ( "org.fcrepo.server.storage.ExternalContentManager" ) ; ContentManagerParams params = new ContentManagerParams ( dsPhysicalLocation ) ; params . setContext ( context ) ; mimeTypedStream = externalContentManager . getExternalContent ( params ) ; // had substituted context : // ReadOnlyContext . getContext ( Constants . HTTP _ REQUEST . REST . uri , // request ) ) ; outStream = response . getOutputStream ( ) ; response . setContentType ( mimeTypedStream . getMIMEType ( ) ) ; Property [ ] headerArray = mimeTypedStream . 
header ; if ( headerArray != null ) { for ( int i = 0 ; i < headerArray . length ; i ++ ) { if ( headerArray [ i ] . name != null && ! headerArray [ i ] . name . equalsIgnoreCase ( "content-type" ) ) { response . addHeader ( headerArray [ i ] . name , headerArray [ i ] . value ) ; logger . debug ( "THIS WAS ADDED TO DATASTREAMRESOLVERSERVLET RESPONSE HEADER FROM ORIGINATING PROVIDER {} : {}" , headerArray [ i ] . name , headerArray [ i ] . value ) ; } } } int byteStream = 0 ; byte [ ] buffer = new byte [ 255 ] ; while ( ( byteStream = mimeTypedStream . getStream ( ) . read ( buffer ) ) != - 1 ) { outStream . write ( buffer , 0 , byteStream ) ; } buffer = null ; outStream . flush ( ) ; mimeTypedStream . close ( ) ; } else if ( dsControlGroupType . equalsIgnoreCase ( "M" ) || dsControlGroupType . equalsIgnoreCase ( "X" ) ) { // Datastream is either XMLMetadata or ManagedContent so // dsLocation // is in the form of an internal Fedora ID using the syntax : // PID + DSID + DSVersID ; parse the ID and get the datastream // content . String PID = null ; String dsVersionID = null ; String dsID = null ; String [ ] s = dsPhysicalLocation . split ( "\\+" ) ; if ( s . length != 3 ) { String message = "[DatastreamResolverServlet] The " + "internal Fedora datastream id: \"" + dsPhysicalLocation + "\" is invalid." ; logger . error ( message ) ; throw new ServletException ( message ) ; } PID = s [ 0 ] ; dsID = s [ 1 ] ; dsVersionID = s [ 2 ] ; logger . debug ( "PID={}, dsID={}, dsVersionID={}" , PID , dsID , dsVersionID ) ; DOReader doReader = m_manager . getReader ( Server . USE_DEFINITIVE_STORE , context , PID ) ; Datastream d = doReader . getDatastream ( dsID , dsVersionID ) ; logger . debug ( "Got datastream: {}" , d . DatastreamID ) ; InputStream is = d . getContentStream ( context ) ; int bytestream = 0 ; response . setContentType ( d . DSMIME ) ; outStream = response . getOutputStream ( ) ; byte [ ] buffer = new byte [ 255 ] ; while ( ( bytestream = is . 
read ( buffer ) ) != - 1 ) { outStream . write ( buffer , 0 , bytestream ) ; } buffer = null ; is . close ( ) ; } else { out = response . getWriter ( ) ; response . setContentType ( HTML_CONTENT_TYPE ) ; out . println ( "<br>[DatastreamResolverServlet] Unknown " + "dsControlGroupType: " + dsControlGroupType + "</br>" ) ; logger . error ( "Unknown dsControlGroupType: " + dsControlGroupType ) ; } } catch ( AuthzException ae ) { logger . error ( "Authorization failure resolving datastream" + " (actionLabel=" + ACTION_LABEL + ")" , ae ) ; throw RootException . getServletException ( ae , request , ACTION_LABEL , EMPTY_STRING_ARRAY ) ; } catch ( Throwable th ) { logger . error ( "Error resolving datastream" , th ) ; String message = "[DatastreamResolverServlet] returned an error. The " + "underlying error was a \"" + th . getClass ( ) . getName ( ) + " The message was \"" + th . getMessage ( ) + "\". " ; throw new ServletException ( message ) ; } finally { if ( out != null ) { out . close ( ) ; } if ( outStream != null ) { outStream . close ( ) ; } DisseminationService . dsRegistry . remove ( id ) ; }
public class HylaFaxClientSpi { /** * Returns an instance of the hyla fax client . * @ return The client instance */ protected synchronized HylaFAXClient getHylaFAXClient ( ) { } }
HylaFAXClient client = null ; if ( this . connection == null ) { // create new connection this . connection = this . connectionFactory . createConnection ( ) ; } // get client client = this . connection . getResource ( ) ; return client ;
public class GalleryServiceImpl { /** * A kind of inverse lookup - finding the public path given the actual file . * < strong > NOTE ! This method does NOT verify that the current user actually * has the right to access the given publicRoot ! It is the responsibility of * calling methods to make sure only allowed root paths are used . < / strong > * @ param publicRoot * @ param file * @ return The public path of the given file for the given publicRoot . * @ throws IOException * @ throws NotAllowedException */ @ Override public String getPublicPathFromRealFile ( String publicRoot , File file ) throws IOException , NotAllowedException { } }
String actualFilePath = file . getCanonicalPath ( ) ; File rootFile = galleryAuthorizationService . getRootPathsForCurrentUser ( ) . get ( publicRoot ) ; String relativePath = actualFilePath . substring ( rootFile . getCanonicalPath ( ) . length ( ) , actualFilePath . length ( ) ) ; StringBuilder builder = new StringBuilder ( ) ; builder . append ( publicRoot ) ; builder . append ( relativePath ) ; String publicPath = separatorsToUnix ( builder . toString ( ) ) ; LOG . debug ( "Actual file: {}, generated public path: {}" , file , publicPath ) ; return publicPath ;
public class UploadImage { /** * Runs the example . * @ param adWordsServices the services factory . * @ param session the session . * @ throws ApiException if the API request failed with one or more service errors . * @ throws RemoteException if the API request failed due to other errors . * @ throws IOException if unable to get media data from the URL . */ public static void runExample ( AdWordsServicesInterface adWordsServices , AdWordsSession session ) throws IOException { } }
// Get the MediaService . MediaServiceInterface mediaService = adWordsServices . get ( session , MediaServiceInterface . class ) ; // Create image . Image image = new Image ( ) ; image . setData ( com . google . api . ads . common . lib . utils . Media . getMediaDataFromUrl ( "https://goo.gl/3b9Wfh" ) ) ; image . setType ( MediaMediaType . IMAGE ) ; Media [ ] media = new Media [ ] { image } ; // Upload image . Media [ ] result = mediaService . upload ( media ) ; // Display images . image = ( Image ) result [ 0 ] ; Map < MediaSize , Dimensions > dimensions = Maps . toMap ( image . getDimensions ( ) ) ; System . out . printf ( "Image with ID %d, dimensions %dx%d, and MIME type '%s' was " + "uploaded.%n" , image . getMediaId ( ) , dimensions . get ( MediaSize . FULL ) . getWidth ( ) , dimensions . get ( MediaSize . FULL ) . getHeight ( ) , image . getMediaType ( ) ) ;
public class JavaParser {
    /**
     * ANTLR-generated parser rule — do not hand-edit; regenerate from the grammar instead.
     *
     * src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1203:1:
     * unaryExpressionNotPlusMinus : ( '~' unaryExpression | '!' unaryExpression | castExpression | primary ( selector )* ( '++' | '--' )? ) ;
     */
    public final void unaryExpressionNotPlusMinus() throws RecognitionException {
        // Remember where the rule started so it can be memoized in the finally block.
        int unaryExpressionNotPlusMinus_StartIndex = input.index();
        try {
            // When backtracking, skip the rule if this position was already parsed (rule id 125).
            if (state.backtracking > 0 && alreadyParsedRule(input, 125)) {
                return;
            }
            // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1204:5:
            // ( '~' unaryExpression | '!' unaryExpression | castExpression | primary ( selector )* ( '++' | '--' )? )
            int alt159 = 4;
            // Alternative prediction on up to 3 tokens of lookahead; token 126 is '~', 29 is '!',
            // 36 opens a parenthesis (cast vs. parenthesized primary is resolved by the
            // syntactic predicate synpred240_Java below).
            switch (input.LA(1)) {
                case 126: {
                    alt159 = 1;
                }
                    break;
                case 29: {
                    alt159 = 2;
                }
                    break;
                case 36: {
                    // Ambiguous '(' — inspect LA(2)/LA(3) and run the predicate to choose
                    // between castExpression (alt 3) and primary (alt 4).
                    // NOTE: the LA159_nn locals are generated but unused; they are kept
                    // byte-identical to the generator's output.
                    switch (input.LA(2)) {
                        case 65: case 67: case 71: case 77: case 85: case 92: case 94: case 105: {
                            int LA159_20 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case Identifier: {
                            int LA159_21 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case 40: {
                            int LA159_22 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case 44: {
                            int LA159_23 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case 41: {
                            int LA159_24 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case 45: {
                            int LA159_25 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case 126: {
                            int LA159_26 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case 29: {
                            int LA159_27 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case 36: {
                            int LA159_28 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case 53: {
                            int LA159_29 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case 111: {
                            int LA159_30 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case 108: {
                            int LA159_31 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case 80: {
                            int LA159_32 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case 79: {
                            int LA159_33 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case 70: {
                            int LA159_34 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case DecimalLiteral: case HexLiteral: case OctalLiteral: {
                            int LA159_35 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case FloatingPointLiteral: {
                            int LA159_36 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case CharacterLiteral: {
                            int LA159_37 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case StringLiteral: {
                            int LA159_38 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case 82: case 115: {
                            int LA159_39 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case 98: {
                            int LA159_40 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case 97: {
                            int LA159_41 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        case 118: {
                            int LA159_42 = input.LA(3);
                            if ((synpred240_Java())) { alt159 = 3; } else if ((true)) { alt159 = 4; }
                        }
                            break;
                        default:
                            // No viable LA(2): fail silently when backtracking, otherwise
                            // raise NoViableAlt (consuming/rewinding around decision 159, state 3).
                            if (state.backtracking > 0) {
                                state.failed = true;
                                return;
                            }
                            int nvaeMark = input.mark();
                            try {
                                input.consume();
                                NoViableAltException nvae = new NoViableAltException("", 159, 3, input);
                                throw nvae;
                            } finally {
                                input.rewind(nvaeMark);
                            }
                    }
                }
                    break;
                case CharacterLiteral:
                case DecimalLiteral:
                case FloatingPointLiteral:
                case HexLiteral:
                case Identifier:
                case OctalLiteral:
                case StringLiteral:
                case 53: case 65: case 67: case 70: case 71: case 77: case 79: case 80:
                case 82: case 85: case 92: case 94: case 97: case 98: case 105: case 108:
                case 111: case 115: case 118: {
                    // Anything that can start a primary goes straight to alt 4.
                    alt159 = 4;
                }
                    break;
                default:
                    // No viable LA(1) for decision 159.
                    if (state.backtracking > 0) {
                        state.failed = true;
                        return;
                    }
                    NoViableAltException nvae = new NoViableAltException("", 159, 0, input);
                    throw nvae;
            }
            switch (alt159) {
                case 1:
                    // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1204:9: '~' unaryExpression
                    {
                        match(input, 126, FOLLOW_126_in_unaryExpressionNotPlusMinus5549);
                        if (state.failed) return;
                        pushFollow(FOLLOW_unaryExpression_in_unaryExpressionNotPlusMinus5551);
                        unaryExpression();
                        state._fsp--;
                        if (state.failed) return;
                    }
                    break;
                case 2:
                    // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1205:8: '!' unaryExpression
                    {
                        match(input, 29, FOLLOW_29_in_unaryExpressionNotPlusMinus5560);
                        if (state.failed) return;
                        pushFollow(FOLLOW_unaryExpression_in_unaryExpressionNotPlusMinus5562);
                        unaryExpression();
                        state._fsp--;
                        if (state.failed) return;
                    }
                    break;
                case 3:
                    // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1206:9: castExpression
                    {
                        pushFollow(FOLLOW_castExpression_in_unaryExpressionNotPlusMinus5572);
                        castExpression();
                        state._fsp--;
                        if (state.failed) return;
                    }
                    break;
                case 4:
                    // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1207:9: primary ( selector )* ( '++' | '--' )?
                    {
                        pushFollow(FOLLOW_primary_in_unaryExpressionNotPlusMinus5582);
                        primary();
                        state._fsp--;
                        if (state.failed) return;
                        // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1207:17: ( selector )*
                        loop157: while (true) {
                            int alt157 = 2;
                            int LA157_0 = input.LA(1);
                            // tokens 47 and 59 start a selector ('.' / '[')
                            if ((LA157_0 == 47 || LA157_0 == 59)) {
                                alt157 = 1;
                            }
                            switch (alt157) {
                                case 1:
                                    // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1207:17: selector
                                    {
                                        pushFollow(FOLLOW_selector_in_unaryExpressionNotPlusMinus5584);
                                        selector();
                                        state._fsp--;
                                        if (state.failed) return;
                                    }
                                    break;
                                default:
                                    break loop157;
                            }
                        }
                        // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1207:27: ( '++' | '--' )?
                        int alt158 = 2;
                        int LA158_0 = input.LA(1);
                        // tokens 41 and 45 are '++' and '--'
                        if ((LA158_0 == 41 || LA158_0 == 45)) {
                            alt158 = 1;
                        }
                        switch (alt158) {
                            case 1:
                                // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:
                                {
                                    if (input.LA(1) == 41 || input.LA(1) == 45) {
                                        input.consume();
                                        state.errorRecovery = false;
                                        state.failed = false;
                                    } else {
                                        if (state.backtracking > 0) {
                                            state.failed = true;
                                            return;
                                        }
                                        MismatchedSetException mse = new MismatchedSetException(null, input);
                                        throw mse;
                                    }
                                }
                                break;
                        }
                    }
                    break;
            }
        } catch (RecognitionException re) {
            reportError(re);
            recover(input, re);
        } finally {
            // do for sure before leaving
            if (state.backtracking > 0) {
                memoize(input, 125, unaryExpressionNotPlusMinus_StartIndex);
            }
        }
    }
}
public class HTTPRequestMarshaller {
    /**
     * Marshall the given parameter object.
     * Generated SDK marshaller: fields are written in model order and each
     * *_BINDING constant carries the wire name for its field.
     */
    public void marshall(HTTPRequest hTTPRequest, ProtocolMarshaller protocolMarshaller) {
        // Null model objects cannot be marshalled; fail fast with the SDK's client exception.
        if (hTTPRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(hTTPRequest.getClientIP(), CLIENTIP_BINDING);
            protocolMarshaller.marshall(hTTPRequest.getCountry(), COUNTRY_BINDING);
            protocolMarshaller.marshall(hTTPRequest.getURI(), URI_BINDING);
            protocolMarshaller.marshall(hTTPRequest.getMethod(), METHOD_BINDING);
            protocolMarshaller.marshall(hTTPRequest.getHTTPVersion(), HTTPVERSION_BINDING);
            protocolMarshaller.marshall(hTTPRequest.getHeaders(), HEADERS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class SchemaFactoryFinder {
    /**
     * <p>Creates an instance of the specified and returns it.</p>
     *
     * @param className
     *        fully qualified class name to be instantiated.
     * @return null
     *         if it fails. Error messages will be printed by this method.
     */
    SchemaFactory createInstance(String className) {
        try {
            if (debug) debugPrintln("instantiating " + className);
            Class clazz;
            // Prefer the configured class loader; fall back to the current one.
            if (classLoader != null)
                clazz = classLoader.loadClass(className);
            else
                clazz = Class.forName(className);
            if (debug) debugPrintln("loaded it from " + which(clazz));
            Object o = clazz.newInstance();
            // Only hand back instances that actually implement SchemaFactory.
            if (o instanceof SchemaFactory)
                return (SchemaFactory) o;
            if (debug) debugPrintln(className + " is not assignable to " + SERVICE_CLASS.getName());
        }
        // The VM ran out of memory or there was some other serious problem. Re-throw.
        catch (VirtualMachineError vme) {
            throw vme;
        }
        // ThreadDeath should always be re-thrown
        catch (ThreadDeath td) {
            throw td;
        }
        // Deliberate broad catch: any failure to load/instantiate a plugin class
        // is reported and answered with null rather than propagated.
        catch (Throwable t) {
            // NOTE(review): unlike the other messages this one is not guarded by
            // the debug flag — presumably intentional, but worth confirming.
            debugPrintln("failed to instantiate " + className);
            if (debug) t.printStackTrace();
        }
        return null;
    }
}
public class AutoFringer { /** * Retrieve or compose an image for the specified fringe . */ protected BufferedImage getTileImage ( BufferedImage img , FringeConfiguration . FringeTileSetRecord tsr , int baseset , int index , int hashValue , Map < Long , BufferedImage > masks ) throws NoSuchTileSetException { } }
int fringeset = tsr . fringe_tsid ; TileSet fset = _tmgr . getTileSet ( fringeset ) ; if ( ! tsr . mask ) { // oh good , this is easy Tile stamp = fset . getTile ( index ) ; return stampTileImage ( stamp , img , stamp . getWidth ( ) , stamp . getHeight ( ) ) ; } // otherwise , it ' s a mask . . Long maskkey = Long . valueOf ( ( ( ( long ) baseset ) << 32 ) + ( fringeset << 16 ) + index ) ; BufferedImage mask = masks . get ( maskkey ) ; if ( mask == null ) { BufferedImage fsrc = _tmgr . getTileSet ( fringeset ) . getRawTileImage ( index ) ; BufferedImage bsrc = _tmgr . getTileSet ( baseset ) . getRawTileImage ( 0 ) ; mask = ImageUtil . composeMaskedImage ( _imgr , fsrc , bsrc ) ; masks . put ( maskkey , mask ) ; } return stampTileImage ( mask , img , mask . getWidth ( null ) , mask . getHeight ( null ) ) ;