signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class SpiderScan { /** * Pauses the scan .
* The call to this method has no effect if the scan is not running . */
@ Override public void pauseScan ( ) { } }
|
lock . lock ( ) ; try { if ( State . RUNNING . equals ( state ) ) { spiderThread . pauseScan ( ) ; state = State . PAUSED ; SpiderEventPublisher . publishScanEvent ( ScanEventPublisher . SCAN_PAUSED_EVENT , this . scanId ) ; } } finally { lock . unlock ( ) ; }
|
public class Blade { /** * Set to start the web server to monitor port , the default is 9000
* @ param port web server port , default is 9000
* @ return blade */
public Blade listen ( int port ) { } }
|
Assert . greaterThan ( port , 0 , "server port not is negative number." ) ; this . environment . set ( ENV_KEY_SERVER_PORT , port ) ; return this ;
|
public class SamlObjectSignatureValidator { /** * Verify saml profile request if needed .
* @ param profileRequest the profile request
* @ param resolver the resolver
* @ param request the request
* @ param context the context
* @ throws Exception the exception */
public void verifySamlProfileRequestIfNeeded ( final RequestAbstractType profileRequest , final MetadataResolver resolver , final HttpServletRequest request , final MessageContext context ) throws Exception { } }
|
val roleDescriptorResolver = getRoleDescriptorResolver ( resolver , context , profileRequest ) ; LOGGER . debug ( "Validating signature for [{}]" , profileRequest . getClass ( ) . getName ( ) ) ; val signature = profileRequest . getSignature ( ) ; if ( signature != null ) { validateSignatureOnProfileRequest ( profileRequest , signature , roleDescriptorResolver ) ; } else { validateSignatureOnAuthenticationRequest ( profileRequest , request , context , roleDescriptorResolver ) ; }
|
public class Client { /** * Deletes set of series by a filter .
* @ param filter The series filter @ see Filter
* @ return A DeleteSummary providing information about the series deleted .
* @ see DeleteSummary
* @ see Filter
* @ since 1.0.0 */
public Result < DeleteSummary > deleteSeries ( Filter filter ) { } }
|
URI uri = null ; try { URIBuilder builder = new URIBuilder ( String . format ( "/%s/series/" , API_VERSION ) ) ; addFilterToURI ( builder , filter ) ; uri = builder . build ( ) ; } catch ( URISyntaxException e ) { String message = String . format ( "Could not build URI with input - filter: %s" , filter ) ; throw new IllegalArgumentException ( message , e ) ; } HttpRequest request = buildRequest ( uri . toString ( ) , HttpMethod . DELETE ) ; Result < DeleteSummary > result = execute ( request , DeleteSummary . class ) ; return result ;
|
public class DataLabelingServiceClient { /** * Imports data into dataset based on source locations defined in request . It can be called
* multiple times for the same dataset . Each dataset can only have one long running operation
* running on it . For example , no labeling task ( also long running operation ) can be started while
* importing is still ongoing . Vice versa .
* < p > Sample code :
* < pre > < code >
* try ( DataLabelingServiceClient dataLabelingServiceClient = DataLabelingServiceClient . create ( ) ) {
* String formattedName = DataLabelingServiceClient . formatDatasetName ( " [ PROJECT ] " , " [ DATASET ] " ) ;
* InputConfig inputConfig = InputConfig . newBuilder ( ) . build ( ) ;
* ImportDataOperationResponse response = dataLabelingServiceClient . importDataAsync ( formattedName , inputConfig ) . get ( ) ;
* < / code > < / pre >
* @ param name Required . Dataset resource name , format :
* projects / { project _ id } / datasets / { dataset _ id }
* @ param inputConfig Required . Specify the input source of the data .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi ( "The surface for long-running operations is not stable yet and may change in the future." ) public final OperationFuture < ImportDataOperationResponse , ImportDataOperationMetadata > importDataAsync ( String name , InputConfig inputConfig ) { } }
|
DATASET_PATH_TEMPLATE . validate ( name , "importData" ) ; ImportDataRequest request = ImportDataRequest . newBuilder ( ) . setName ( name ) . setInputConfig ( inputConfig ) . build ( ) ; return importDataAsync ( request ) ;
|
public class FlowUtils { /** * Configures all queues being used in a flow .
* @ return A Multimap from flowletId to QueueName where the flowlet is a consumer of . */
public static Multimap < String , QueueName > configureQueue ( Program program , FlowSpecification flowSpec , QueueAdmin queueAdmin ) { } }
|
// Generate all queues specifications
Table < QueueSpecificationGenerator . Node , String , Set < QueueSpecification > > queueSpecs = new SimpleQueueSpecificationGenerator ( ) . create ( flowSpec ) ; // For each queue in the flow , gather a map of consumer groupId to number of instances
Table < QueueName , Long , Integer > queueConfigs = HashBasedTable . create ( ) ; // For storing result from flowletId to queue .
ImmutableSetMultimap . Builder < String , QueueName > resultBuilder = ImmutableSetMultimap . builder ( ) ; // Loop through each flowlet
for ( Map . Entry < String , FlowletDefinition > entry : flowSpec . getFlowlets ( ) . entrySet ( ) ) { String flowletId = entry . getKey ( ) ; long groupId = FlowUtils . generateConsumerGroupId ( program , flowletId ) ; int instances = entry . getValue ( ) . getInstances ( ) ; // For each queue that the flowlet is a consumer , store the number of instances for this flowlet
for ( QueueSpecification queueSpec : Iterables . concat ( queueSpecs . column ( flowletId ) . values ( ) ) ) { queueConfigs . put ( queueSpec . getQueueName ( ) , groupId , instances ) ; resultBuilder . put ( flowletId , queueSpec . getQueueName ( ) ) ; } } try { // For each queue in the flow , configure it through QueueAdmin
for ( Map . Entry < QueueName , Map < Long , Integer > > row : queueConfigs . rowMap ( ) . entrySet ( ) ) { LOG . info ( "Queue config for {} : {}" , row . getKey ( ) , row . getValue ( ) ) ; queueAdmin . configureGroups ( row . getKey ( ) , row . getValue ( ) ) ; } return resultBuilder . build ( ) ; } catch ( Exception e ) { LOG . error ( "Failed to configure queues" , e ) ; throw Throwables . propagate ( e ) ; }
|
public class SDMath { /** * Returns the pair - wise cross product of equal size arrays a and b : a x b = | | a | | x | | b | | sin ( theta ) .
* Can take rank 1 or above inputs ( of equal shapes ) , but note that the last dimension must have dimension 3
* @ param a First input
* @ param b Second input
* @ return Element - wise cross product */
public SDVariable cross ( String name , SDVariable a , SDVariable b ) { } }
|
validateNumerical ( "cross" , a , b ) ; SDVariable ret = f ( ) . cross ( a , b ) ; return updateVariableNameAndReference ( ret , name ) ;
|
public class AsciiTable { /** * Sets the text alignment for all cells in the table .
* @ param textAlignment new text alignment
* @ throws NullPointerException if the argument was null
* @ return this to allow chaining
* @ throws { @ link NullPointerException } if the argument was null */
public AsciiTable setTextAlignment ( TextAlignment textAlignment ) { } }
|
for ( AT_Row row : this . rows ) { if ( row . getType ( ) == TableRowType . CONTENT ) { row . setTextAlignment ( textAlignment ) ; } } return this ;
|
public class CmsXmlContentRepairSettings { /** * Sets the VFS folder under which XML contents will be processed recursively . < p >
* @ param vfsFolder the VFS folder under which XML contents will be processed recursively
* @ throws CmsIllegalArgumentException if the given VFS path is not valid */
public void setVfsFolder ( String vfsFolder ) throws CmsIllegalArgumentException { } }
|
if ( CmsStringUtil . isEmptyOrWhitespaceOnly ( vfsFolder ) ) { throw new CmsIllegalArgumentException ( Messages . get ( ) . container ( Messages . ERR_VALUE_EMPTY_0 ) ) ; } // test if it is a valid path
if ( ! m_cms . existsResource ( vfsFolder , CmsResourceFilter . ALL . addRequireFolder ( ) ) ) { throw new CmsIllegalArgumentException ( Messages . get ( ) . container ( Messages . ERR_XMLCONTENT_VFSFOLDER_1 , vfsFolder ) ) ; } m_vfsFolder = vfsFolder ;
|
public class CmsJspTagEditable { /** * Inserts direct edit for empty collector lists . < p >
* @ throws CmsException in case of invalid collector settings
* @ throws JspException in case writing to page context fails */
private void insertEditEmpty ( ) throws CmsException , JspException { } }
|
Tag ancestor = findAncestorWithClass ( this , I_CmsXmlContentContainer . class ) ; I_CmsXmlContentContainer container = null ; if ( ancestor != null ) { // parent content container available , use preloaded values from this container
container = ( I_CmsXmlContentContainer ) ancestor ; insertEditEmpty ( pageContext , container , m_mode == null ? CmsDirectEditMode . AUTO : m_mode , null ) ; }
|
public class Timecode { /** * Add some samples to this timecode
* @ param samples
* @ return */
public Timecode add ( SampleCount samples ) { } }
|
final SampleCount mySamples = getSampleCount ( ) ; final SampleCount totalSamples = mySamples . add ( samples ) ; return TimecodeBuilder . fromSamples ( totalSamples , dropFrame ) . build ( ) ;
|
public class ControllerHandler { /** * Configures the controller method arguments .
* @ param injector */
protected void configureMethodArgs ( Injector injector ) { } }
|
Class < ? > [ ] types = method . getParameterTypes ( ) ; extractors = new ArgumentExtractor [ types . length ] ; patterns = new String [ types . length ] ; for ( int i = 0 ; i < types . length ; i ++ ) { final Parameter parameter = method . getParameters ( ) [ i ] ; final Class < ? extends Collection > collectionType ; final Class < ? > objectType ; if ( Collection . class . isAssignableFrom ( types [ i ] ) ) { collectionType = ( Class < ? extends Collection > ) types [ i ] ; objectType = getParameterGenericType ( parameter ) ; } else { collectionType = null ; objectType = types [ i ] ; } // determine the appropriate extractor
Class < ? extends ArgumentExtractor > extractorType ; if ( FileItem . class == objectType ) { extractorType = FileItemExtractor . class ; } else { extractorType = ControllerUtil . getArgumentExtractor ( parameter ) ; } // instantiate the extractor
extractors [ i ] = injector . getInstance ( extractorType ) ; // configure the extractor
if ( extractors [ i ] instanceof ConfigurableExtractor < ? > ) { ConfigurableExtractor extractor = ( ConfigurableExtractor ) extractors [ i ] ; Annotation annotation = ClassUtil . getAnnotation ( parameter , extractor . getAnnotationClass ( ) ) ; if ( annotation != null ) { extractor . configure ( annotation ) ; } } if ( extractors [ i ] instanceof SuffixExtractor ) { // the last parameter can be assigned content type suffixes
SuffixExtractor extractor = ( SuffixExtractor ) extractors [ i ] ; extractor . setSuffixes ( contentTypeSuffixes ) ; } if ( collectionType != null ) { if ( extractors [ i ] instanceof CollectionExtractor ) { CollectionExtractor extractor = ( CollectionExtractor ) extractors [ i ] ; extractor . setCollectionType ( collectionType ) ; } else { throw new FatalException ( "Controller method '{}' parameter {} of type '{}' does not specify an argument extractor that supports collections!" , Util . toString ( method ) , i + 1 , Util . toString ( collectionType , objectType ) ) ; } } if ( extractors [ i ] instanceof TypedExtractor ) { TypedExtractor extractor = ( TypedExtractor ) extractors [ i ] ; extractor . setObjectType ( objectType ) ; } if ( extractors [ i ] instanceof NamedExtractor ) { // ensure that the extractor has a proper name
NamedExtractor namedExtractor = ( NamedExtractor ) extractors [ i ] ; if ( Strings . isNullOrEmpty ( namedExtractor . getName ( ) ) ) { // parameter is not named via annotation
// try looking for the parameter name in the compiled . class file
if ( parameter . isNamePresent ( ) ) { namedExtractor . setName ( parameter . getName ( ) ) ; } else { log . error ( "Properly annotate your controller methods OR specify the '-parameters' flag for your Java compiler!" ) ; throw new FatalException ( "Controller method '{}' parameter {} of type '{}' does not specify a name!" , Util . toString ( method ) , i + 1 , Util . toString ( collectionType , objectType ) ) ; } } } }
|
public class CommerceAccountUserRelPersistenceImpl { /** * Returns all the commerce account user rels where commerceAccountUserId = & # 63 ; .
* @ param commerceAccountUserId the commerce account user ID
* @ return the matching commerce account user rels */
@ Override public List < CommerceAccountUserRel > findByCommerceAccountUserId ( long commerceAccountUserId ) { } }
|
return findByCommerceAccountUserId ( commerceAccountUserId , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ;
|
public class CacheConfiguration { /** * 根据K - V配置信息生成缓存配置
* @ param cacheConfigMap 缓存的K - V配置信息
* @ return 缓存配置 */
public static CacheConfiguration build ( ConfigMap < String , String > cacheConfigMap ) { } }
|
if ( cacheConfigMap == null || cacheConfigMap . size ( ) == 0 ) { return new SynchronizedCacheConfiguration ( ) ; } if ( "ENABLE" . equals ( cacheConfigMap . getString ( "cache" , "ENABLE" ) . toUpperCase ( ) ) ) { String cacheAdapter = cacheConfigMap . getString ( "cache.adapter" , "SYN" ) . toUpperCase ( ) ; if ( cacheAdapter . equals ( "SYN" ) ) { return new SynchronizedCacheConfiguration ( cacheConfigMap ) ; } else if ( cacheAdapter . equals ( "EHCACHE" ) ) { return new EhCacheConfiguration ( cacheConfigMap ) ; } else if ( cacheAdapter . equals ( "MEMCACHED" ) ) { return new MemcachedCacheConfiguration ( cacheConfigMap ) ; } } return null ;
|
public class EmailGlobalSettings { /** * Set all settings to the default . This is helpful for testing .
* @ since 3.0.0 */
public static void setToDefault ( ) { } }
|
s_aRWLock . writeLocked ( ( ) -> { s_nMaxMailQueueLen = DEFAULT_MAX_QUEUE_LENGTH ; s_nMaxMailSendCount = DEFAULT_MAX_SEND_COUNT ; s_bUseSSL = DEFAULT_USE_SSL ; s_bUseSTARTTLS = DEFAULT_USE_STARTTLS ; s_nConnectionTimeoutMilliSecs = DEFAULT_CONNECT_TIMEOUT_MILLISECS ; s_nTimeoutMilliSecs = DEFAULT_TIMEOUT_MILLISECS ; s_bDebugSMTP = GlobalDebug . isDebugMode ( ) ; s_aConnectionListeners . clear ( ) ; s_aEmailDataTransportListeners . clear ( ) ; } ) ;
|
public class AbstractAmazonSQSAsync { /** * Simplified method form for invoking the ChangeMessageVisibilityBatch operation with an AsyncHandler .
* @ see # changeMessageVisibilityBatchAsync ( ChangeMessageVisibilityBatchRequest , com . amazonaws . handlers . AsyncHandler ) */
@ Override public java . util . concurrent . Future < ChangeMessageVisibilityBatchResult > changeMessageVisibilityBatchAsync ( String queueUrl , java . util . List < ChangeMessageVisibilityBatchRequestEntry > entries , com . amazonaws . handlers . AsyncHandler < ChangeMessageVisibilityBatchRequest , ChangeMessageVisibilityBatchResult > asyncHandler ) { } }
|
return changeMessageVisibilityBatchAsync ( new ChangeMessageVisibilityBatchRequest ( ) . withQueueUrl ( queueUrl ) . withEntries ( entries ) , asyncHandler ) ;
|
public class FeatureStandardizer { /** * Applies this standardization procedure to the features in
* { @ code featureFactor } .
* @ param featureFactor
* @ return */
public DiscreteFactor apply ( DiscreteFactor featureFactor ) { } }
|
return featureFactor . add ( means . product ( - 1.0 ) ) . product ( inverseStdDevs ) . add ( finalOffset ) . product ( rescalingFactor ) ;
|
public class AbstractCSLToolCommand { /** * Prints out usage information */
protected void usage ( ) { } }
|
String footnotes = null ; if ( ! getOptions ( ) . getCommands ( ) . isEmpty ( ) ) { footnotes = "Use `" + CSLToolContext . current ( ) . getToolName ( ) + " help <command>' to read about a specific command." ; } String name = CSLToolContext . current ( ) . getToolName ( ) ; String usageName = getUsageName ( ) ; if ( usageName != null && ! usageName . isEmpty ( ) ) { name += " " + usageName ; } String unknownArguments = OptionIntrospector . getUnknownArgumentName ( getClassesToIntrospect ( ) ) ; OptionParser . usage ( name , getUsageDescription ( ) , getOptions ( ) , unknownArguments , footnotes , new PrintWriter ( System . out , true ) ) ;
|
public class NotificationEndPoint { /** * Create an operation that will retrieve the given notification end point
* @ param notificationEndPointId
* id of notification end point to retrieve
* @ return the operation */
public static EntityGetOperation < NotificationEndPointInfo > get ( String notificationEndPointId ) { } }
|
return new DefaultGetOperation < NotificationEndPointInfo > ( ENTITY_SET , notificationEndPointId , NotificationEndPointInfo . class ) ;
|
public class HyperionClient { /** * Set the proxy configuration to use for this client .
* @ param proxy The proxy configuration */
public void setProxy ( Proxy proxy ) { } }
|
if ( proxy != null ) { java . net . Proxy p = new java . net . Proxy ( java . net . Proxy . Type . HTTP , new InetSocketAddress ( proxy . getHost ( ) , proxy . getPort ( ) ) ) ; client . setProxy ( p ) ; } else client . setProxy ( java . net . Proxy . NO_PROXY ) ;
|
public class TrainingsImpl { /** * Get information about a specific tag .
* @ param projectId The project this tag belongs to
* @ param tagId The tag id
* @ param iterationId The iteration to retrieve this tag from . Optional , defaults to current training set
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the Tag object */
public Observable < ServiceResponse < Tag > > getTagWithServiceResponseAsync ( UUID projectId , UUID tagId , UUID iterationId ) { } }
|
if ( projectId == null ) { throw new IllegalArgumentException ( "Parameter projectId is required and cannot be null." ) ; } if ( tagId == null ) { throw new IllegalArgumentException ( "Parameter tagId is required and cannot be null." ) ; } if ( this . client . apiKey ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiKey() is required and cannot be null." ) ; } return service . getTag ( projectId , tagId , iterationId , this . client . apiKey ( ) , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < Tag > > > ( ) { @ Override public Observable < ServiceResponse < Tag > > call ( Response < ResponseBody > response ) { try { ServiceResponse < Tag > clientResponse = getTagDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
|
public class InvoiceReportPdf { /** * < p > Evaluate I18N overseas sales invoice lines query . < / p >
* @ param pItsOwnerId ID of sales invoice
* @ param pLang lang
* @ return query
* @ throws Exception - an exception */
public final String evalSalesInvOverseaseLinesSql ( final String pItsOwnerId , final String pLang ) throws Exception { } }
|
if ( this . salesInvOverseaseLinesSql == null ) { synchronized ( this ) { if ( this . salesInvOverseaseLinesSql == null ) { String flName = "/accounting/trade/salesInvOverseaseLines.sql" ; this . salesInvOverseaseLinesSql = loadString ( flName ) ; } } } String query = this . salesInvOverseaseLinesSql . replace ( ":ITSOWNER" , pItsOwnerId ) . replace ( ":LANG" , pLang ) ; return query ;
|
public class MsgMgr { /** * Tell the user of an error , and probably throw an
* exception .
* @ param msg Message text to issue
* @ throws XSLProcessorException thrown if the active ProblemListener and XPathContext decide
* the error condition is severe enough to halt processing .
* @ throws TransformerException
* @ xsl . usage internal */
public void error ( SourceLocator srcLctr , String msg ) throws TransformerException { } }
|
error ( srcLctr , null , null , msg , null ) ;
|
public class ZoomableGraphicsContext { /** * Sets the current stroke line dash pattern in meters to a normalized copy of
* the argument .
* The default value is { @ code null } .
* The line dash array is a stroke attribute
* used for any of the stroke methods as specified in the
* Rendering Attributes Table of { @ link GraphicsContext } .
* If the array is { @ code null } or empty or contains all { @ code 0 } elements
* then dashing will be disabled and the current dash array will be set
* to { @ code null } .
* If any of the elements of the array are a negative , infinite , or NaN
* value outside the range { @ code [ 0 , + inf ) } then the entire array will
* be ignored and the current dash array will remain unchanged .
* If the array is an odd length then it will be treated as if it
* were two copies of the array appended to each other .
* @ param dashes the array of finite non - negative dash lengths
* @ see # setLineDashesInPixels ( double . . . ) */
public void setLineDashesInMeters ( double ... dashes ) { } }
|
final double [ ] newDashes ; if ( dashes == null ) { newDashes = dashes ; } else { newDashes = new double [ dashes . length ] ; for ( int i = 0 ; i < newDashes . length ; ++ i ) { newDashes [ i ] = doc2fxSize ( dashes [ i ] ) ; } } setLineDashesInPixels ( newDashes ) ;
|
public class MaterialCollapsibleItem { /** * Make this item active . */
@ Override public void setActive ( boolean active ) { } }
|
this . active = active ; if ( parent != null ) { fireCollapsibleHandler ( ) ; removeStyleName ( CssName . ACTIVE ) ; if ( header != null ) { header . removeStyleName ( CssName . ACTIVE ) ; } if ( active ) { if ( parent != null && parent . isAccordion ( ) ) { parent . clearActive ( ) ; } addStyleName ( CssName . ACTIVE ) ; if ( header != null ) { header . addStyleName ( CssName . ACTIVE ) ; } } if ( body != null ) { body . setDisplay ( active ? Display . BLOCK : Display . NONE ) ; } } else { GWT . log ( "Please make sure that the Collapsible parent is attached or existed." , new IllegalStateException ( ) ) ; }
|
public class GenericCollectionTypeResolver { /** * Returns the source class for the specified type .
* This is either < code > Collection . class < / code > or < code > Map . class < / code > .
* If none of both matches the baseclass of the specified type is returned .
* @ param type the type to determine the source class for
* @ return the source class */
private static Class getSourceClass ( Type type ) { } }
|
Class sourceClass = toClass ( type ) ; if ( Collection . class . isAssignableFrom ( sourceClass ) ) { return Collection . class ; } else if ( Map . class . isAssignableFrom ( sourceClass ) ) { return Map . class ; } else { return sourceClass ; }
|
public class KernelRunner { /** * Helper method that manages the memory allocation for storing the kernel argument data ,
* so that the data can be exchanged between the host and the OpenCL device .
* @ param arg the kernel argument
* @ param newRef the actual Java data instance
* @ param objArraySize the number of elements in the Java array
* @ param totalStructSize the size of each target array element
* @ param totalBufferSize the total buffer size including memory alignment
* @ return < ul > < li > true , if internal buffer had to be allocated or reallocated holding the data < / li >
* < li > false , if buffer didn ' t change and is already allocated < / li > < / ul > */
public boolean allocateArrayBufferIfFirstTimeOrArrayChanged ( KernelArg arg , Object newRef , final int objArraySize , final int totalStructSize , final int totalBufferSize ) { } }
|
boolean didReallocate = false ; if ( ( arg . getObjArrayBuffer ( ) == null ) || ( newRef != arg . getArray ( ) ) ) { final ByteBuffer structBuffer = ByteBuffer . allocate ( totalBufferSize ) ; arg . setObjArrayByteBuffer ( structBuffer . order ( ByteOrder . LITTLE_ENDIAN ) ) ; arg . setObjArrayBuffer ( arg . getObjArrayByteBuffer ( ) . array ( ) ) ; didReallocate = true ; if ( logger . isLoggable ( Level . FINEST ) ) { logger . finest ( "objArraySize = " + objArraySize + " totalStructSize= " + totalStructSize + " totalBufferSize=" + totalBufferSize ) ; } } else { arg . getObjArrayByteBuffer ( ) . clear ( ) ; } return didReallocate ;
|
public class ParaClient { /** * Checks if a given object is linked to this one .
* @ param toObj the other object
* @ param obj the object to execute this method on
* @ return true if linked */
public boolean isLinked ( ParaObject obj , ParaObject toObj ) { } }
|
if ( obj == null || obj . getId ( ) == null || toObj == null || toObj . getId ( ) == null ) { return false ; } return isLinked ( obj , toObj . getType ( ) , toObj . getId ( ) ) ;
|
public class PluginManager { /** * Registers event listeners with the specified plugin properties , class loader and plugin .
* @ param props the specified plugin properties
* @ param classLoader the specified class loader
* @ param plugin the specified plugin
* @ throws Exception exception */
private void registerEventListeners ( final Properties props , final URLClassLoader classLoader , final AbstractPlugin plugin ) throws Exception { } }
|
final String eventListenerClasses = props . getProperty ( Plugin . PLUGIN_EVENT_LISTENER_CLASSES ) ; final String [ ] eventListenerClassArray = eventListenerClasses . split ( "," ) ; for ( final String eventListenerClassName : eventListenerClassArray ) { if ( StringUtils . isBlank ( eventListenerClassName ) ) { LOGGER . log ( Level . INFO , "No event listener to load for plugin[name={0}]" , plugin . getName ( ) ) ; return ; } LOGGER . log ( Level . DEBUG , "Loading event listener[className={0}]" , eventListenerClassName ) ; final Class < ? > eventListenerClass = classLoader . loadClass ( eventListenerClassName ) ; final AbstractEventListener < ? > eventListener = ( AbstractEventListener ) eventListenerClass . newInstance ( ) ; plugin . addEventListener ( eventListener ) ; LOGGER . log ( Level . DEBUG , "Registered event listener[class={0}, eventType={1}] for plugin[name={2}]" , eventListener . getClass ( ) , eventListener . getEventType ( ) , plugin . getName ( ) ) ; }
|
public class CampaignEstimateRequest { /** * Gets the dailyBudget value for this CampaignEstimateRequest .
* @ return dailyBudget * Daily campaign budget to use in traffic estimation . If not
* specified ,
* the daily budget is unlimited . */
public com . google . api . ads . adwords . axis . v201809 . cm . Money getDailyBudget ( ) { } }
|
return dailyBudget ;
|
public class MapEntryLite { /** * Parses an entry off of the input into the map . This helper avoids allocaton of a { @ link MapEntryLite } by parsing
* directly into the provided { @ link MapFieldLite } .
* @ param map the map
* @ param input the input
* @ param extensionRegistry the extension registry
* @ throws IOException Signals that an I / O exception has occurred . */
public void parseInto ( MapFieldLite < K , V > map , CodedInputStream input , ExtensionRegistryLite extensionRegistry ) throws IOException { } }
|
int length = input . readRawVarint32 ( ) ; final int oldLimit = input . pushLimit ( length ) ; K key = metadata . defaultKey ; V value = metadata . defaultValue ; while ( true ) { int tag = input . readTag ( ) ; if ( tag == 0 ) { break ; } if ( tag == CodedConstant . makeTag ( KEY_FIELD_NUMBER , metadata . keyType . getWireType ( ) ) ) { key = parseField ( input , extensionRegistry , metadata . keyType , key ) ; } else if ( tag == CodedConstant . makeTag ( VALUE_FIELD_NUMBER , metadata . valueType . getWireType ( ) ) ) { value = parseField ( input , extensionRegistry , metadata . valueType , value ) ; } else { if ( ! input . skipField ( tag ) ) { break ; } } } input . checkLastTagWas ( 0 ) ; input . popLimit ( oldLimit ) ; map . put ( key , value ) ;
|
public class Events { /** * Takes a { @ link CouchbaseEvent } and returns a map with event information .
* @ param source the source event .
* @ return a new map which contains name and type info in an event sub - map . */
public static Map < String , Object > identityMap ( CouchbaseEvent source ) { } }
|
Map < String , Object > root = new HashMap < String , Object > ( ) ; Map < String , String > event = new HashMap < String , String > ( ) ; event . put ( "name" , source . getClass ( ) . getSimpleName ( ) . replaceAll ( "Event$" , "" ) ) ; event . put ( "type" , source . type ( ) . toString ( ) ) ; root . put ( "event" , event ) ; return root ;
|
public class FileUtil { /** * Renames the specified file .
* If the destination is a directory ( and the source is not ) , the source
* file is simply moved to the destination directory .
* @ param pFrom The file to rename
* @ param pTo The new file
* @ param pOverWrite Specifies if the tofile should be overwritten , if it
* exists
* @ return { @ code true } , if the file was renamed .
* @ throws FileNotFoundException if { @ code pFrom } does not exist . */
public static boolean rename ( File pFrom , File pTo , boolean pOverWrite ) throws IOException { } }
|
if ( ! pFrom . exists ( ) ) { throw new FileNotFoundException ( pFrom . getAbsolutePath ( ) ) ; } if ( pFrom . isFile ( ) && pTo . isDirectory ( ) ) { pTo = new File ( pTo , pFrom . getName ( ) ) ; } return ( pOverWrite || ! pTo . exists ( ) ) && pFrom . renameTo ( pTo ) ;
|
public class GraphIterationElement { /** * Compare the two given segments .
* @ param firstSegment the first segment to compare to the second segment .
* @ param secondSegment the second segment to compare to the first segment .
* @ return < code > - 1 < / code > if { @ code firstSegment } is lower than { @ code secondSegment } ,
* < code > 1 < / code > if { @ code firstSegment } is greater than { @ code secondSegment } ,
* or < code > 0 < / code > if { @ code firstSegment } is equal to { @ code secondSegment } . */
@ Pure @ SuppressWarnings ( { } }
|
"unchecked" , "rawtypes" , "static-method" } ) protected int compareSegments ( GraphSegment < ? , ? > firstSegment , GraphSegment < ? , ? > secondSegment ) { if ( firstSegment instanceof Comparable ) { try { return ( ( Comparable ) firstSegment ) . compareTo ( secondSegment ) ; } catch ( AssertionError e ) { throw e ; } catch ( Throwable e ) { } } return firstSegment . hashCode ( ) - secondSegment . hashCode ( ) ;
|
public class InjectionHelper { /** * This returns the type of the injection being requested based on either the
* annotated field or annotated method . */
static Class < ? > getTypeFromMember ( Member member ) throws InjectionException { } }
|
Class < ? > memberType = null ; if ( member instanceof Field ) { memberType = ( ( Field ) member ) . getType ( ) ; } else if ( member instanceof Method ) { Method method = ( Method ) member ; if ( method . getParameterTypes ( ) == null || method . getParameterTypes ( ) . length != 1 ) { String msg = Tr . formatMessage ( tc , "error.service.ref.member.level.annotation.wrong.method.name" , method . getName ( ) , method . getDeclaringClass ( ) . getName ( ) ) ; throw new InjectionException ( msg ) ; } memberType = method . getParameterTypes ( ) [ 0 ] ; } return memberType ;
|
public class CircularImageView { /** * Sets the placeholder text .
* @ param text */
public final void setPlaceholder ( String text ) { } }
|
if ( ! text . equalsIgnoreCase ( mText ) ) { setPlaceholderTextInternal ( text , mTextColor , mTextSize , true ) ; }
|
public class CmsHistoryRow { /** * Gets the last modification user . < p >
* @ return the last modification user */
@ Column ( header = org . opencms . workplace . commons . Messages . GUI_LABEL_USER_LAST_MODIFIED_0 , order = 70 ) public String getUserLastModified ( ) { } }
|
return m_bean . getUserLastModified ( ) ;
|
public class StreamUtil {
    /**
     * Returns a collector that accumulates stream elements into a
     * {@link LinkedHashMap}, preserving first-encounter key order.
     *
     * @param keyMapper     extracts the map key from an element
     * @param valueMapper   extracts the map value from an element
     * @param mergeFunction combines values when {@code keyMapper} yields the
     *                      same key more than once
     * @param <T> element type of the input stream
     * @param <K> key type of the resulting map
     * @param <U> value type of the resulting map
     * @return a collector producing an insertion-ordered map
     */
    public static <T, K, U> Collector<T, ?, LinkedHashMap<K, U>> toLinkedMap(
            Function<? super T, ? extends K> keyMapper,
            Function<? super T, ? extends U> valueMapper,
            BinaryOperator<U> mergeFunction) {
        // LinkedHashMap keeps keys in the order they were first inserted.
        Supplier<LinkedHashMap<K, U>> mapFactory = LinkedHashMap::new;
        return Collectors.toMap(keyMapper, valueMapper, mergeFunction, mapFactory);
    }
}
|
public class BrowserUtils { /** * Returns the command to execute to open the specified URL , according to the
* current OS .
* @ param url the url to open
* @ return the command to execute to open the url with the default browser
* @ throws java . io . IOException if an I / O exception occurred while locating the browser
* @ throws InterruptedException if the thread is interrupted while locating the browser */
private static String [ ] getOpenBrowserCommand ( String url ) throws IOException , InterruptedException { } }
|
if ( IS_WINDOWS ) { return new String [ ] { "rundll32" , "url.dll,FileProtocolHandler" , url } ; } else if ( IS_MAC ) { return new String [ ] { "/usr/bin/open" , url } ; } else if ( IS_LINUX ) { String [ ] browsers = { "google-chrome" , "firefox" , "opera" , "konqueror" , "epiphany" , "mozilla" , "netscape" } ; for ( String browser : browsers ) { if ( Runtime . getRuntime ( ) . exec ( new String [ ] { "which" , browser } ) . waitFor ( ) == 0 ) { return new String [ ] { browser , url } ; } } throw new UnsupportedOperationException ( "Cannot find a browser" ) ; } else { throw new UnsupportedOperationException ( "Opening browser is not implemented for your OS (" + OS_NAME + ")" ) ; }
|
public class CcgUnaryRule {
    /**
     * Parses a unary rule from a line in comma-separated format. The expected
     * fields, in order, are:
     * <ul>
     * <li>The headed syntactic categories to combine and return:
     * <code>(input syntax) (return syntax)</code>
     * <li>(optional) A logical form for the rule
     * <li>(optional, unsupported) Additional unfilled dependencies, in standard
     * format: <code>(predicate) (argument number) (argument variable)</code>
     * </ul>
     * For example, "NP{0} S{1}/(S{1}\NP{0}){1}" is a unary type-raising rule
     * that allows an NP to combine with an adjacent verb.
     *
     * @param line the CSV-formatted rule description
     * @return the parsed rule, with its return category in canonical form
     * @throws IllegalArgumentException if the line is malformed
     * @throws UnsupportedOperationException if unfilled dependencies are supplied
     */
    public static CcgUnaryRule parseFrom(String line) {
        // Split the CSV fields; field 0 is mandatory (the syntax spec).
        String[] chunks = new CsvParser(CsvParser.DEFAULT_SEPARATOR, CsvParser.DEFAULT_QUOTE,
                CsvParser.NULL_ESCAPE).parseLine(line.trim());
        Preconditions.checkArgument(chunks.length >= 1, "Illegal unary rule string: %s", line);
        // Field 0 holds "<input syntax> <return syntax>", space separated.
        String[] syntacticParts = chunks[0].split(" ");
        Preconditions.checkArgument(syntacticParts.length == 2, "Illegal unary rule string: %s", line);
        HeadedSyntacticCategory inputSyntax = HeadedSyntacticCategory.parseFrom(syntacticParts[0]);
        HeadedSyntacticCategory returnSyntax = HeadedSyntacticCategory.parseFrom(syntacticParts[1]);
        // Ensure that the return syntactic type is in canonical form, and record
        // how the original return variables map onto the canonical ones.
        HeadedSyntacticCategory returnCanonical = returnSyntax.getCanonicalForm();
        int[] originalToCanonical = returnSyntax.unifyVariables(returnSyntax.getUniqueVariables(),
                returnCanonical, new int[0]);
        // Relabel the input category's variables to agree with the canonical
        // return category: variables shared with the return keep the canonical
        // id; variables unique to the input get fresh ids above the canonical range.
        int[] inputVars = inputSyntax.getUniqueVariables();
        int[] inputRelabeling = new int[inputVars.length];
        int[] returnOriginalVars = returnSyntax.getUniqueVariables();
        int nextUnassignedVar = Ints.max(returnCanonical.getUniqueVariables()) + 1;
        for (int i = 0; i < inputVars.length; i++) {
            int index = Ints.indexOf(returnOriginalVars, inputVars[i]);
            if (index != -1) {
                // Shared variable: reuse the canonical id.
                inputRelabeling[i] = originalToCanonical[index];
            } else {
                // Input-only variable: assign the next unused id.
                inputRelabeling[i] = nextUnassignedVar;
                nextUnassignedVar++;
            }
        }
        HeadedSyntacticCategory relabeledInput = inputSyntax.relabelVariables(inputVars, inputRelabeling);
        // Field 1, when present and non-blank, is the rule's logical form.
        Expression2 logicalForm = null;
        if (chunks.length >= 2 && chunks[1].trim().length() > 0) {
            logicalForm = ExpressionParser.expression2().parse(chunks[1]);
        }
        // Field 2+ (unfilled dependencies) is parsed nowhere yet — fail loudly.
        if (chunks.length >= 3) {
            throw new UnsupportedOperationException(
                    "Using unfilled dependencies with unary CCG rules is not yet implemented");
            /*
             * String[] newDeps = chunks[4].split(" ");
             * Preconditions.checkArgument(newDeps.length == 3); long
             * subjectNum = Long.parseLong(newDeps[0].substring(1)); long
             * argNum = Long.parseLong(newDeps[1]); long objectNum =
             * Long.parseLong(newDeps[2].substring(1)); unfilledDeps = new
             * long[1];
             * unfilledDeps[0] =
             * CcgParser.marshalUnfilledDependency(objectNum, argNum,
             * subjectNum, 0, 0);
             */
        }
        return new CcgUnaryRule(relabeledInput, returnCanonical, logicalForm);
    }
}
|
public class HashFunctions {
    /**
     * Robert Jenkins' 96-bit mix function.
     *
     * <p>Three rounds of subtract/xor/shift operations spread every input bit
     * across the 96 bits of internal state ({@code a}, {@code b}, {@code c}),
     * so a single-bit change in the key influences widely separated bits of
     * the 32-bit result. {@code a} and {@code b} act as initialized random
     * bits; {@code c} carries the key in and the hash out.
     *
     * <p>See <a href="http://www.burtleburtle.net/bob/hash/doobs.html">Bob
     * Jenkins' hash write-up</a> and
     * <a href="http://en.wikipedia.org/wiki/Jenkins_hash_function">Jenkins
     * hash function</a>.
     *
     * @param a initialized random bits
     * @param b initialized random bits
     * @param c key to be hashed
     * @return randomized c bits (hashed c)
     */
    public static int mix(int a, int b, int c) {
        // Round 1
        a -= b; a -= c; a ^= c >>> 13;
        b -= c; b -= a; b ^= a << 8;
        c -= a; c -= b; c ^= b >>> 13;
        // Round 2
        a -= b; a -= c; a ^= c >>> 12;
        b -= c; b -= a; b ^= a << 16;
        c -= a; c -= b; c ^= b >>> 5;
        // Round 3
        a -= b; a -= c; a ^= c >>> 3;
        b -= c; b -= a; b ^= a << 10;
        c -= a; c -= b; c ^= b >>> 15;
        return c;
    }
}
|
public class PropertiesUtil { /** * Returns the specified property from the supplied properties object .
* @ throws MissingPropertyException with the supplied message if the property does not exist or
* is the empty string . */
public static String requireProperty ( Properties props , String key , String missingMessage ) { } }
|
String value = props . getProperty ( key ) ; if ( StringUtil . isBlank ( value ) ) { throw new MissingPropertyException ( key , missingMessage ) ; } return value ;
|
public class LibertyTransactionService {
    /**
     * Called by Declarative Services to activate this service. Propagates
     * the activation to the owned UserTransaction and TransactionManager
     * helpers, in that order.
     *
     * @param compcontext the context of this component
     */
    protected void activate(ComponentContext compcontext) {
        if (TC.isDebugEnabled()) {
            Tr.debug(TC, "Activating " + this.getClass().getName());
        }
        this.userTransaction.activate(compcontext);
        this.transactionManager.activate(compcontext);
    }
}
|
public class JsiiEngine { /** * Invokes a local callback and returns the result / error .
* @ param callback The callback to invoke .
* @ return The return value
* @ throws JsiiException if the callback failed . */
public JsonNode handleCallback ( final Callback callback ) { } }
|
if ( callback . getInvoke ( ) != null ) { return invokeCallbackMethod ( callback . getInvoke ( ) , callback . getCookie ( ) ) ; } else if ( callback . getGet ( ) != null ) { return invokeCallbackGet ( callback . getGet ( ) ) ; } else if ( callback . getSet ( ) != null ) { return invokeCallbackSet ( callback . getSet ( ) ) ; } throw new JsiiException ( "Unrecognized callback type: get/set/invoke" ) ;
|
public class DestinationInfoMarshaller {
    /**
     * Marshalls the given {@code DestinationInfo} (its id and service fields)
     * into the protocol marshaller.
     *
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(DestinationInfo destinationInfo, ProtocolMarshaller protocolMarshaller) {
        if (destinationInfo == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(destinationInfo.getId(), ID_BINDING);
            protocolMarshaller.marshall(destinationInfo.getService(), SERVICE_BINDING);
        } catch (Exception e) {
            // Wrap everything in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
|
public class CouchDbClient {
    /**
     * <p>Performs a HTTP HEAD request.</p>
     * <p>The stream <b>must</b> be closed after use.</p>
     *
     * @return the response body stream of the HEAD request
     *         (note: not a {@code Response} object)
     */
    InputStream head(URI uri) {
        HttpConnection connection = Http.HEAD(uri);
        return executeToInputStream(connection);
    }
}
|
public class SerializationUtil { /** * Get the appropriate { @ link ISchemaVersion schema version } for the specified
* serialized HLL .
* @ param bytes the serialized HLL whose schema version is desired .
* @ return the schema version for the specified HLL . This will never
* be < code > null < / code > . */
public static ISchemaVersion getSchemaVersion ( final byte [ ] bytes ) { } }
|
final byte versionByte = bytes [ 0 ] ; final int schemaVersionNumber = schemaVersion ( versionByte ) ; return getSchemaVersion ( schemaVersionNumber ) ;
|
public class ARM { /** * Mines the association rules . The discovered rules will be printed out
* to the provided stream .
* @ param confidence the confidence threshold for association rules .
* @ return the number of discovered association rules . */
public long learn ( double confidence , PrintStream out ) { } }
|
long n = 0 ; ttree = fim . buildTotalSupportTree ( ) ; for ( int i = 0 ; i < ttree . root . children . length ; i ++ ) { if ( ttree . root . children [ i ] != null ) { int [ ] itemset = { ttree . root . children [ i ] . id } ; n += learn ( out , null , itemset , i , ttree . root . children [ i ] , confidence ) ; } } return n ;
|
public class JsonElement { /** * Wraps the given object if to JsonXXX object . */
public static JsonElement wrap ( Object o ) throws JsonException { } }
|
if ( o == null ) { // null value means not specified i . e . - > no valued will be mapped
// Json . null is specific value
return null ; } if ( o instanceof JsonElement ) { return ( JsonElement ) o ; } if ( o instanceof ElementWrapper ) { return ( ( ElementWrapper ) o ) . getJson ( ) ; } if ( o instanceof Collection ) { return new JsonArray ( ( Collection ) o ) ; } else if ( o . getClass ( ) . isArray ( ) ) { return new JsonArray ( o ) ; } if ( o instanceof Map ) { return new JsonObject ( ( Map ) o ) ; } if ( o instanceof Boolean ) { return new JsonBoolean ( ( Boolean ) o ) ; } if ( o instanceof Number ) { return new JsonNumber ( ( Number ) o ) ; } if ( o instanceof String ) { return new JsonString ( ( String ) o ) ; } if ( o instanceof Character ) { return new JsonString ( Character . toString ( ( Character ) o ) ) ; } if ( o instanceof ByteBuffer ) { return new JsonString ( ( ( ByteBuffer ) o ) . asCharBuffer ( ) . toString ( ) ) ; } return new JsonString ( o . toString ( ) ) ;
|
public class CloneCommand {
    /**
     * Checks if a config update message is necessary and sends it if it is.
     * A message is sent only when config cloning is enabled, the source site
     * reports a complete repo/revision/branch triple, and the target differs
     * in at least one of the three.
     *
     * @param site1 The source site for the configuration.
     * @param site2 The target site for the configuration.
     * @param site1Status The status of the source site.
     * @param site2Status The status of the target site.
     * @throws Exception if sending the update message fails
     */
    private void checkSendUpdateConfigMessage(String site1, String site2,
            Status site1Status, Status site2Status) throws Exception {
        String repo;
        String revision;
        String branch;
        // includeConfig is a command-level flag; skip everything when disabled.
        if (includeConfig) {
            repo = site1Status.getConfigRepo();
            revision = site1Status.getConfigRevision();
            branch = site1Status.getConfigBranch();
            // Only proceed when the source reports all three config coordinates.
            if (!StringUtils.isEmptyOrNull(repo) && !StringUtils.isEmptyOrNull(revision)
                    && !StringUtils.isEmptyOrNull(branch)) {
                // Send only when the target differs from the source in any coordinate.
                if (!repo.equals(site2Status.getConfigRepo())
                        || !revision.equals(site2Status.getConfigRevision())
                        || !branch.equals(site2Status.getConfigBranch())) {
                    System.out.println("Sending update/config message to [" + site2 + "]");
                    UpdateCommand.sendUpdateMessage(site2, repo, branch, revision,
                            "Cloned config from [" + site1 + "]: " + comment, token,
                            UpdateConfigCommand.UPDATE_CONFIG_ENDPOINT,
                            UpdateRequest.CONFIG_BRANCH_PREFIX);
                } else {
                    System.out.println("Source [" + site1
                            + "] is on the same configuration repo, branch, and revision as the target ["
                            + site2 + "].");
                }
            } else {
                System.out.println("Configuration status not available from source site [" + site1 + "]");
            }
        }
    }
}
|
public class Reflections {
    /**
     * Gets types annotated with a given annotation, both classes and
     * annotations, including annotation member values matching.
     * <p>{@link java.lang.annotation.Inherited} is honored according to the
     * given {@code honorInherited} flag.
     * <p>Depends on TypeAnnotationsScanner being configured.
     */
    public Set<Class<?>> getTypesAnnotatedWith(final Annotation annotation, boolean honorInherited) {
        // Names of types carrying the annotation type, from the scanner store.
        Iterable<String> annotated = store.get(index(TypeAnnotationsScanner.class),
                annotation.annotationType().getName());
        // Keep only those whose annotation member values match the given instance.
        Iterable<Class<?>> filter = filter(forNames(annotated, loaders()), withAnnotation(annotation));
        // Expand transitively (subtypes / meta-annotated), respecting @Inherited.
        Iterable<String> classes = getAllAnnotated(names(filter),
                annotation.annotationType().isAnnotationPresent(Inherited.class), honorInherited);
        // Union of the direct matches and the transitive closure (minus duplicates).
        return Sets.newHashSet(concat(filter,
                forNames(filter(classes, not(in(Sets.newHashSet(annotated)))), loaders())));
    }
}
|
public class OfflineDataUploadPage {
    /**
     * Gets the entries value for this OfflineDataUploadPage.
     * Plain accessor; returns the internal array reference without copying.
     *
     * @return entries
     */
    public com.google.api.ads.adwords.axis.v201809.rm.OfflineDataUpload[] getEntries() {
        return entries;
    }
}
|
public class Integer {
    /**
     * Returns the number of zero bits preceding the highest-order
     * ("leftmost") one-bit in the two's complement binary representation of
     * the specified {@code int} value, or 32 if the value has no one-bits
     * (i.e. is zero).
     *
     * <p>Related to the base-2 logarithm: for positive x,
     * floor(log2(x)) = {@code 31 - numberOfLeadingZeros(x)} and
     * ceil(log2(x)) = {@code 32 - numberOfLeadingZeros(x - 1)}.
     *
     * @param i the value whose number of leading zeros is to be computed
     * @return the number of leading zero bits, or 32 if {@code i == 0}
     * @since 1.5
     */
    public static int numberOfLeadingZeros(int i) {
        // Negative values have their sign bit set, so zero leading zeros.
        if (i <= 0) {
            return i == 0 ? 32 : 0;
        }
        // Binary search for the highest set bit, narrowing 16 -> 8 -> 4 -> 2 bits,
        // then subtract the final bit's contribution.
        int n = 31;
        if (i >= 1 << 16) { n -= 16; i >>>= 16; }
        if (i >= 1 << 8)  { n -= 8;  i >>>= 8; }
        if (i >= 1 << 4)  { n -= 4;  i >>>= 4; }
        if (i >= 1 << 2)  { n -= 2;  i >>>= 2; }
        return n - (i >>> 1);
    }
}
|
public class TargetStreamControl { /** * / * ( non - Javadoc )
* @ see com . ibm . ws . sib . processor . runtime . SIMPDeliveryStreamReceiverControllable # getQueuedMessageIterator */
public SIMPIterator getReceivedMessageIterator ( int maxMsgs ) { } }
|
List receivedMessages = new LinkedList ( ) ; Iterator < MessageItem > streamMessages = _targetStream . getAllMessagesOnStream ( ) . iterator ( ) ; boolean allMsgs = ( maxMsgs == SIMPConstants . SIMPCONTROL_RETURN_ALL_MESSAGES ) ; int index = 0 ; while ( ( allMsgs || ( index < maxMsgs ) ) && streamMessages . hasNext ( ) ) { MessageItem msgItem = ( MessageItem ) streamMessages . next ( ) ; SIMPReceivedMessageControllable receivedMessage = new LinkReceivedMessageControl ( msgItem , _tsm . getDestinationHandler ( ) . getMessageProcessor ( ) , _tsm . getDestinationHandler ( ) ) ; receivedMessages . add ( receivedMessage ) ; index ++ ; } return new BasicSIMPIterator ( receivedMessages . iterator ( ) ) ;
|
public class EmvCard { /** * Method used to get the field holderFirstname
* @ return the holderFirstname */
public String getHolderFirstname ( ) { } }
|
String ret = holderFirstname ; if ( ret == null && track1 != null ) { ret = track1 . getHolderFirstname ( ) ; } return ret ;
|
public class MutablePropertySources { /** * Remove and return the property source with the given name , { @ code null } if
* not found .
* @ param name the name of the property source to find and remove */
public PropertySource < ? > remove ( String name ) { } }
|
if ( logger . isDebugEnabled ( ) ) { logger . debug ( "Removing PropertySource '" + name + "'" ) ; } int index = this . propertySourceList . indexOf ( PropertySource . named ( name ) ) ; return ( index != - 1 ? this . propertySourceList . remove ( index ) : null ) ;
|
public class Help { /** * Return a description of a specific command . Uses a layout inspired by * nix man pages . */
private CharSequence documentCommand ( String command ) { } }
|
MethodTarget methodTarget = commandRegistry . listCommands ( ) . get ( command ) ; if ( methodTarget == null ) { throw new IllegalArgumentException ( "Unknown command '" + command + "'" ) ; } AttributedStringBuilder result = new AttributedStringBuilder ( ) . append ( "\n\n" ) ; List < ParameterDescription > parameterDescriptions = getParameterDescriptions ( methodTarget ) ; // NAME
documentCommandName ( result , command , methodTarget . getHelp ( ) ) ; // SYNOPSYS
documentSynopsys ( result , command , parameterDescriptions ) ; // OPTIONS
documentOptions ( result , parameterDescriptions ) ; // ALSO KNOWN AS
documentAliases ( result , command , methodTarget ) ; // AVAILABILITY
documentAvailability ( result , methodTarget ) ; result . append ( "\n" ) ; return result ;
|
public class GenericsResolutionUtils {
    /**
     * Analyzes super class generics (relative to the provided type). A class
     * may not declare generics for its super class ({@code Some extends Base}
     * where {@code class Base<T>}) and, in this case, parent class generics
     * can be resolved only by upper bound. Note that parent type analysis must
     * be performed only when generics for the parent type are not known ahead
     * of time (inlying resolution cases).
     *
     * @param type     type to analyze the parent class for
     * @param generics known type generics
     * @return resolved parent class generics; an empty map when the parent
     *         declares none (or there is no analyzable parent)
     */
    private static LinkedHashMap<String, Type> analyzeParent(final Class type,
            final Map<String, Type> generics) {
        LinkedHashMap<String, Type> res = null;
        final Class parent = type.getSuperclass();
        // Parent generics can be resolved directly only when the subclass
        // names them via a parameterized superclass declaration.
        if (!type.isInterface() && parent != null && parent != Object.class
                && type.getGenericSuperclass() instanceof ParameterizedType) {
            res = resolveGenerics(type.getGenericSuperclass(), generics);
        } else if (parent != null && parent.getTypeParameters().length > 0) {
            // root class didn't declare generics — fall back to the parent's
            // raw declaration (upper bounds).
            res = resolveRawGenerics(parent);
        }
        return res == null ? EmptyGenericsMap.getInstance() : res;
    }
}
|
public class SoapAttachmentParser { /** * Parse the attachment element with all children and attributes .
* @ param attachmentElement */
public static SoapAttachment parseAttachment ( Element attachmentElement ) { } }
|
SoapAttachment soapAttachment = new SoapAttachment ( ) ; if ( attachmentElement . hasAttribute ( "content-id" ) ) { soapAttachment . setContentId ( attachmentElement . getAttribute ( "content-id" ) ) ; } if ( attachmentElement . hasAttribute ( "content-type" ) ) { soapAttachment . setContentType ( attachmentElement . getAttribute ( "content-type" ) ) ; } if ( attachmentElement . hasAttribute ( "charset-name" ) ) { soapAttachment . setCharsetName ( attachmentElement . getAttribute ( "charset-name" ) ) ; } if ( attachmentElement . hasAttribute ( "mtom-inline" ) ) { soapAttachment . setMtomInline ( Boolean . parseBoolean ( attachmentElement . getAttribute ( "mtom-inline" ) ) ) ; } if ( attachmentElement . hasAttribute ( "encoding-type" ) ) { soapAttachment . setEncodingType ( attachmentElement . getAttribute ( "encoding-type" ) ) ; } Element attachmentDataElement = DomUtils . getChildElementByTagName ( attachmentElement , "data" ) ; if ( attachmentDataElement != null ) { soapAttachment . setContent ( DomUtils . getTextValue ( attachmentDataElement ) ) ; } Element attachmentResourceElement = DomUtils . getChildElementByTagName ( attachmentElement , "resource" ) ; if ( attachmentResourceElement != null ) { soapAttachment . setContentResourcePath ( attachmentResourceElement . getAttribute ( "file" ) ) ; } return soapAttachment ;
|
public class Network { /** * Create connections between a single switch and multiple nodes
* @ param bandwidth the maximal bandwidth for the connection
* @ param sw the switch to connect
* @ param nodes a list of nodes to connect
* @ return a list of links */
public List < Link > connect ( int bandwidth , Switch sw , Node ... nodes ) { } }
|
List < Link > l = new ArrayList < > ( ) ; for ( Node n : nodes ) { l . add ( connect ( bandwidth , sw , n ) ) ; } return l ;
|
public class CustomerGoodsSeen { /** * < p > Usually it ' s simple setter for model ID . < / p >
* @ param pItsId model ID */
@ Override public final void setItsId ( final CustomerGoodsSeenId pItsId ) { } }
|
this . itsId = pItsId ; if ( this . itsId != null ) { this . goods = this . itsId . getGoods ( ) ; this . customer = this . itsId . getCustomer ( ) ; } else { this . goods = null ; this . customer = null ; }
|
public class CPOptionCategoryPersistenceImpl {
    /**
     * Returns a range of all the cp option categories where groupId = &#63;.
     *
     * <p>Useful when paginating results. Returns a maximum of
     * <code>end - start</code> instances. <code>start</code> and
     * <code>end</code> are indexes in the result set, not primary keys; pass
     * {@link QueryUtil#ALL_POS} for both to get the full result set.
     * Delegates to the four-argument overload with no order-by comparator,
     * which applies the default ORDER BY (or none, if no pagination is
     * requested).
     *
     * @param groupId the group ID
     * @param start the lower bound of the range of cp option categories
     * @param end the upper bound of the range of cp option categories (not inclusive)
     * @return the range of matching cp option categories
     */
    @Override
    public List<CPOptionCategory> findByGroupId(long groupId, int start, int end) {
        return findByGroupId(groupId, start, end, null);
    }
}
|
public class MatrixToImageWriter {
    /**
     * Writes a {@link BitMatrix} to a stream with default configuration.
     * Delegates to the four-argument overload using {@code DEFAULT_CONFIG}.
     *
     * @param matrix {@link BitMatrix} to write
     * @param format image format
     * @param stream {@link OutputStream} to write image to
     * @throws IOException if writes to the stream fail
     * @see #toBufferedImage(BitMatrix)
     */
    public static void writeToStream(BitMatrix matrix, String format, OutputStream stream)
            throws IOException {
        writeToStream(matrix, format, stream, DEFAULT_CONFIG);
    }
}
|
public class ObjectNames {
    /**
     * Produces a generated JMX object name by delegating to the builder.
     *
     * @return JMX object name of the form
     *         "[package_name]:type=[class_name],name=[ann_class_name]"
     */
    public static String generatedNameOf(Class<?> clazz, Class<? extends Annotation> annotationClass) {
        return builder(clazz, annotationClass).build();
    }
}
|
public class TransportResolver { /** * Trigger a event notifying the obtainment of all the candidates . */
private void triggerResolveEnd ( ) { } }
|
Iterator < TransportResolverListener > iter = getListenersList ( ) . iterator ( ) ; while ( iter . hasNext ( ) ) { TransportResolverListener trl = iter . next ( ) ; if ( trl instanceof TransportResolverListener . Resolver ) { TransportResolverListener . Resolver li = ( TransportResolverListener . Resolver ) trl ; li . end ( ) ; } }
|
public class SearchFilter { /** * Change the search filter to one that specifies a set of elements and their values
* that must match , and the operator to use to combine the elements .
* Each key is compared for an equal match to the value , and all
* comparisons are combined by the specified logical operator ( OR or AND ) .
* The old search filter is deleted .
* @ param elements is a hashtable holding key - value pairs
* @ param combine _ op is the logical operator to be used to combine the comparisons
* @ param compare _ op is the binary operator to be used for the comparisons
* @ exception DBException */
public void matchSet ( Hashtable elements , int combine_op , int compare_op ) throws DBException { } }
|
// Delete the old search filter
m_filter = null ; // If combine _ op is not a logical operator , throw an exception
if ( ( combine_op & LOGICAL_OPER_MASK ) == 0 ) { throw new DBException ( ) ; // If compare _ op is not a binary operator , throw an exception
} if ( ( compare_op & BINARY_OPER_MASK ) == 0 ) { throw new DBException ( ) ; // Create a vector that will hold the comparison nodes for all elements in the hashtable
} Vector compareVector = new Vector ( ) ; // For each of the elements in the hashtable , create a comparison node for the match
for ( Enumeration e = elements . keys ( ) ; e . hasMoreElements ( ) ; ) { // Get the element name from the enumerator
// and its value
String elementName = ( String ) e . nextElement ( ) ; String elementValue = ( String ) elements . get ( elementName ) ; // Create a comparison node for this list and store it as the filter
SearchBaseLeafComparison comparenode = new SearchBaseLeafComparison ( elementName , compare_op , elementValue ) ; // Add this leaf node to the vector
compareVector . addElement ( comparenode ) ; } // Now return a node that holds this set of leaf nodes
m_filter = new SearchBaseNode ( combine_op , compareVector ) ;
|
public class SchedulesInner {
    /**
     * Retrieves a list of schedules, unwrapping each service response to its
     * page body.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;ScheduleInner&gt; object
     */
    public Observable<Page<ScheduleInner>> listByAutomationAccountAsync(
            final String resourceGroupName, final String automationAccountName) {
        return listByAutomationAccountWithServiceResponseAsync(resourceGroupName, automationAccountName)
                .map(new Func1<ServiceResponse<Page<ScheduleInner>>, Page<ScheduleInner>>() {
                    @Override
                    public Page<ScheduleInner> call(ServiceResponse<Page<ScheduleInner>> response) {
                        // Strip the service-response envelope, keeping only the page.
                        return response.body();
                    }
                });
    }
}
|
public class Assert {
    /**
     * Asserts that a Collection is not null and of a certain size.
     *
     * @param input   The input under test
     * @param length  The exact size the collection must have
     * @param message The message for any exception
     * @throws IllegalArgumentException if the collection is null or its size
     *         differs from {@code length}
     */
    public static void hasSize(Collection<?> input, int length, String message) {
        notNull(input, message);
        if (input.size() != length) {
            throw new IllegalArgumentException(message);
        }
    }
}
|
public class RectangleArranger {
    /**
     * Manual test / demo driver: exercises the arranger with three scenarios
     * (ratio-driven, fixed-size, and a near-degenerate ratio) while logging
     * at FINEST.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        LoggingConfiguration.setLevelFor(RectangleArranger.class.getName(), Level.FINEST.getName());
        // Scenario 1: target aspect ratio 1.3 with mixed-size panels.
        RectangleArranger<String> r = new RectangleArranger<>(1.3);
        r.put(4., 1., "Histogram");
        r.put(4., 4., "3D view");
        r.put(1., 1., "Meta 1");
        r.put(1., 1., "Meta 2");
        r.put(1., 1., "Meta 3");
        r.put(2., 2., "Meta 4");
        r.put(2., 2., "Meta 5");
        // Scenario 2: fixed 3x3 canvas.
        r = new RectangleArranger<>(3., 3.);
        r.put(1., 2., "A");
        r.put(2., 1., "B");
        r.put(1., 2., "C");
        r.put(2., 1., "D");
        r.put(2., 2., "E");
        // Scenario 3: awkward ratio that previously exposed packing issues
        // (value presumably chosen to reproduce a specific case — unverified).
        r = new RectangleArranger<>(4 - 2.6521739130434785);
        r.put(4., .5, "A");
        r.put(4., 3., "B");
        r.put(4., 1., "C");
        r.put(1., .1, "D");
    }
}
|
public class CanBeStaticAnalyzer {
    /**
     * Is {@code sym} a non-static member of an enclosing class of the current
     * class? Walks the chain of enclosing classes of {@code owner} and checks
     * membership at each level.
     */
    private static boolean memberOfEnclosing(Symbol owner, VisitorState state, Symbol sym) {
        // Static members (no outer instance) can never require the enclosing instance.
        if (sym == null || !sym.hasOuterInstance()) {
            return false;
        }
        // Walk outward through the enclosing classes of owner's owner.
        for (ClassSymbol encl = owner.owner.enclClass();
                encl != null;
                encl = encl.owner != null ? encl.owner.enclClass() : null) {
            if (sym.isMemberOf(encl, state.getTypes())) {
                return true;
            }
        }
        return false;
    }
}
|
public class JavaWriter {
    /**
     * Annotates the next element with {@code annotation}. The annotation has
     * no attributes. Emits the current indent, an {@code @} sign, the
     * (possibly compacted) type name, and a newline.
     */
    public void annotation(String annotation) throws IOException {
        indent();
        out.write("@");
        // type(...) writes the annotation's name, applying import compaction.
        type(annotation);
        out.write("\n");
    }
}
|
public class HandlerChainInfoBuilder {
    /**
     * Builds the handler chains info from a {@code @HandlerChain} annotation
     * found on the given class (or its SEI).
     *
     * @param clzInfo       the class to scan for the annotation
     * @param seiClassName  the service endpoint interface class name
     * @param infoStore     annotation info store used during the scan
     * @param portQName     the port qualified name
     * @param serviceQName  the service qualified name
     * @param bindingID     the binding identifier
     * @return the handler chains info; empty when no annotation is found
     */
    public HandlerChainsInfo buildHandlerChainsInfoFromAnnotation(ClassInfo clzInfo,
            String seiClassName, InfoStore infoStore, QName portQName, QName serviceQName,
            String bindingID) {
        HandlerChainsInfo chainsInfo = new HandlerChainsInfo();
        HandlerChainAnnotation hcAnn = findHandlerChainAnnotation(clzInfo, seiClassName, infoStore, true);
        if (hcAnn != null) {
            // Validate before processing; an invalid annotation should fail fast.
            hcAnn.validate();
            processHandlerChainAnnotation(chainsInfo, hcAnn.getDeclaringClass().getName(),
                    hcAnn.getFileName(), portQName, serviceQName, bindingID);
        }
        return chainsInfo;
    }
}
|
public class BigDecimal { /** * Divides { @ code BigInteger } value by { @ code BigInteger } value and
* do rounding based on the passed in roundingMode . */
private static BigInteger divideAndRound ( BigInteger bdividend , BigInteger bdivisor , int roundingMode ) { } }
|
boolean isRemainderZero ; // record remainder is zero or not
int qsign ; // quotient sign
// Descend into mutables for faster remainder checks
MutableBigInteger mdividend = new MutableBigInteger ( bdividend . mag ) ; MutableBigInteger mq = new MutableBigInteger ( ) ; MutableBigInteger mdivisor = new MutableBigInteger ( bdivisor . mag ) ; MutableBigInteger mr = mdividend . divide ( mdivisor , mq ) ; isRemainderZero = mr . isZero ( ) ; qsign = ( bdividend . signum != bdivisor . signum ) ? - 1 : 1 ; if ( ! isRemainderZero ) { if ( needIncrement ( mdivisor , roundingMode , qsign , mq , mr ) ) { mq . add ( MutableBigInteger . ONE ) ; } } return mq . toBigInteger ( qsign ) ;
|
public class AbstractDataGridHtmlTag { /** * Generate a name and id given a { @ link AbstractHtmlState } object . Data grid callers may invoke this
* method with subclasses rendering markup containing tags that must set HTML tag IDs for use
* via JavaScript on the client .
* < br / >
* Assumptions :
* < ul >
* < li > The state . name must be fully formed or the " real name " of the form . < / li >
* < li > The state . id is the tagId value set on the tag and < b > has not < / b > be rewritten yet to form the " real id " < / li >
* < / ul >
* @ param state the HTML state whose tag id to set
* @ param parentForm a { @ link Form } tag if one contains this tag
* @ return String a block of JavaScript if script must e rendered to the page in order to support
* lookups of HTML elements using a tag id . If returned , the String < b > must < / b > be rendered
* to the output stream . < code > null < / code > if no script must be rendered . */
protected final String renderNameAndId ( HttpServletRequest request , AbstractHtmlState state , Form parentForm ) { } }
|
// if id is not set then we need to exit
if ( state . id == null ) return null ; // check to see if this is an instance of a HTML Control
boolean ctrlState = ( state instanceof AbstractHtmlControlState ) ; // form keeps track of this so that it can add this control to it ' s focus map
if ( parentForm != null && ctrlState ) { AbstractHtmlControlState hcs = ( AbstractHtmlControlState ) state ; if ( hcs . name == null && parentForm . isFocusSet ( ) ) hcs . name = state . id ; parentForm . addTagID ( state . id , ( ( AbstractHtmlControlState ) state ) . name ) ; } // rewrite the id , save the original value so it can be used in maps
String id = state . id ; state . id = getIdForTagId ( id ) ; // Legacy Java Script support - - This writes out a single table with both the id and names
// mixed . This is legacy support to match the pre beehive behavior .
String idScript = null ; if ( TagConfig . isLegacyJavaScript ( ) ) { ScriptRequestState srs = ScriptRequestState . getScriptRequestState ( request ) ; if ( ! ctrlState ) { idScript = srs . mapLegacyTagId ( getScriptReporter ( ) , id , state . id ) ; } else { AbstractHtmlControlState cState = ( AbstractHtmlControlState ) state ; if ( cState . name != null ) idScript = srs . mapLegacyTagId ( getScriptReporter ( ) , id , cState . name ) ; else idScript = srs . mapLegacyTagId ( getScriptReporter ( ) , id , state . id ) ; } } // map the tagId to the real id
String name = null ; if ( ctrlState ) { AbstractHtmlControlState cState = ( AbstractHtmlControlState ) state ; name = cState . name ; } String script = renderDefaultNameAndId ( ( HttpServletRequest ) request , state , id , name ) ; if ( script != null ) { if ( idScript != null ) idScript = idScript + script ; else idScript = script ; } return idScript ;
|
public class EhCacheImpl { /** * { @ inheritDoc } */
@ Override public boolean offer ( String name , Object obj ) { } }
|
boolean result = false ; try { result = cache . isKeyInCache ( name ) ; // Put an object into the cache
if ( ! result ) { put ( name , obj ) ; } // check again
result = cache . isKeyInCache ( name ) ; } catch ( NullPointerException npe ) { log . debug ( "Name: " + name + " Object: " + obj . getClass ( ) . getName ( ) , npe ) ; } return result ;
|
public class TaskQueueReader { /** * Make the request to the Twilio API to perform the read .
* @ param client TwilioRestClient with which to make the request
* @ return TaskQueue ResourceSet */
@ Override public ResourceSet < TaskQueue > read ( final TwilioRestClient client ) { } }
|
return new ResourceSet < > ( this , client , firstPage ( client ) ) ;
|
public class DNSOutput { /** * Writes an unsigned 32 bit value to the stream .
* @ param val The value to be written */
public void writeU32 ( long val ) { } }
|
check ( val , 32 ) ; need ( 4 ) ; array [ pos ++ ] = ( byte ) ( ( val >>> 24 ) & 0xFF ) ; array [ pos ++ ] = ( byte ) ( ( val >>> 16 ) & 0xFF ) ; array [ pos ++ ] = ( byte ) ( ( val >>> 8 ) & 0xFF ) ; array [ pos ++ ] = ( byte ) ( val & 0xFF ) ;
|
public class VisualizeFiducial { /** * Draws a flat cube to show where the square fiducial is on the image */
public static void drawLabelCenter ( Se3_F64 targetToCamera , CameraPinholeBrown intrinsic , String label , Graphics2D g2 , double scale ) { } }
|
// Computer the center of the fiducial in pixel coordinates
Point2D_F64 p = new Point2D_F64 ( ) ; Point3D_F64 c = new Point3D_F64 ( ) ; WorldToCameraToPixel worldToPixel = PerspectiveOps . createWorldToPixel ( intrinsic , targetToCamera ) ; worldToPixel . transform ( c , p ) ; drawLabel ( p , label , g2 , scale ) ;
|
public class AtomicIntegerSerializer { /** * @ Override
* public void write ( JsonWriter out , String name , AtomicInteger value )
* out . write ( name , value . get ( ) ) ; */
@ Override public void write ( JsonWriterImpl out , AtomicInteger value ) { } }
|
out . write ( value . get ( ) ) ;
|
public class AmazonRedshiftClient { /** * Describes whether information , such as queries and connection attempts , is being logged for the specified Amazon
* Redshift cluster .
* @ param describeLoggingStatusRequest
* @ return Result of the DescribeLoggingStatus operation returned by the service .
* @ throws ClusterNotFoundException
* The < code > ClusterIdentifier < / code > parameter does not refer to an existing cluster .
* @ sample AmazonRedshift . DescribeLoggingStatus
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / redshift - 2012-12-01 / DescribeLoggingStatus " target = " _ top " > AWS
* API Documentation < / a > */
@ Override public DescribeLoggingStatusResult describeLoggingStatus ( DescribeLoggingStatusRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeDescribeLoggingStatus ( request ) ;
|
public class IOUtil { /** * 创建一个BufferedWriter
* @ param path
* @ return
* @ throws FileNotFoundException
* @ throws UnsupportedEncodingException */
public static BufferedWriter newBufferedWriter ( String path ) throws IOException { } }
|
return new BufferedWriter ( new OutputStreamWriter ( IOUtil . newOutputStream ( path ) , "UTF-8" ) ) ;
|
public class AutoSiteMap { /** * Recursively builds the list of all sites . */
private void buildData ( String path , WebPage page , List < TreePageData > data , WebSiteRequest req ) throws IOException , SQLException { } }
|
if ( isVisible ( page ) ) { if ( path . length ( ) > 0 ) path = path + '/' + page . getShortTitle ( ) ; else path = page . getShortTitle ( ) ; WebPage [ ] pages = page . getCachedPages ( req ) ; int len = pages . length ; data . add ( new TreePageData ( len > 0 ? ( path + '/' ) : path , req . getContextPath ( ) + req . getURL ( page ) , page . getDescription ( ) ) ) ; for ( int c = 0 ; c < len ; c ++ ) buildData ( path , pages [ c ] , data , req ) ; }
|
public class Binary16 { /** * Encode the unbiased exponent { @ code e } . Values should be in the
* range { @ code [ - 15 , 16 ] } - values outside of this range will be
* truncated .
* @ param e An exponent
* @ return A packed exponent
* @ see # unpackGetExponentUnbiased ( char ) */
public static char packSetExponentUnbiasedUnchecked ( final int e ) { } }
|
final int eb = e + BIAS ; final int es = eb << 10 ; final int em = es & MASK_EXPONENT ; return ( char ) em ;
|
public class DataStoreEvent { /** * Update event .
* @ param updates Updates
* @ return Event */
public static DataStoreEvent updateEvent ( DBIDs updates ) { } }
|
return new DataStoreEvent ( DBIDUtil . EMPTYDBIDS , DBIDUtil . EMPTYDBIDS , updates ) ;
|
public class AWSIotClient { /** * Attaches the specified policy to the specified principal ( certificate or other credential ) .
* < b > Note : < / b > This API is deprecated . Please use < a > AttachPolicy < / a > instead .
* @ param attachPrincipalPolicyRequest
* The input for the AttachPrincipalPolicy operation .
* @ return Result of the AttachPrincipalPolicy operation returned by the service .
* @ throws ResourceNotFoundException
* The specified resource does not exist .
* @ throws InvalidRequestException
* The request is not valid .
* @ throws ThrottlingException
* The rate exceeds the limit .
* @ throws UnauthorizedException
* You are not authorized to perform this operation .
* @ throws ServiceUnavailableException
* The service is temporarily unavailable .
* @ throws InternalFailureException
* An unexpected error has occurred .
* @ throws LimitExceededException
* A limit has been exceeded .
* @ sample AWSIot . AttachPrincipalPolicy */
@ Override @ Deprecated public AttachPrincipalPolicyResult attachPrincipalPolicy ( AttachPrincipalPolicyRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeAttachPrincipalPolicy ( request ) ;
|
public class Index { /** * A convenient blocking call that can be used to wait on an index until it
* has either become active or deleted ( ie no longer exists ) by polling the
* table every 5 seconds .
* Currently online index creation / deletion is only supported for Global
* Secondary Index ( GSI ) . The behavior of calling this method on a Local
* Secondary Index ( LSI ) would result in returning the latest table
* description .
* @ return the table description when the index has become either active
* or deleted
* @ throws IllegalArgumentException if the table is being deleted
* @ throws ResourceNotFoundException if the table doesn ' t exist */
public TableDescription waitForActiveOrDelete ( ) throws InterruptedException { } }
|
final Table table = getTable ( ) ; final String indexName = getIndexName ( ) ; retry : for ( ; ; ) { TableDescription desc = table . waitForActive ( ) ; List < GlobalSecondaryIndexDescription > list = desc . getGlobalSecondaryIndexes ( ) ; if ( list != null ) { for ( GlobalSecondaryIndexDescription d : desc . getGlobalSecondaryIndexes ( ) ) { if ( d . getIndexName ( ) . equals ( indexName ) ) { final String status = d . getIndexStatus ( ) ; if ( IndexStatus . fromValue ( status ) == IndexStatus . ACTIVE ) return desc ; Thread . sleep ( SLEEP_TIME_MILLIS ) ; continue retry ; } } } return desc ; }
|
public class GenericsTrackingUtils { /** * Base idea : resolving class hierarchy with root generics as variables and compare resolved known type generics
* with actual generics ( in the simplest case { @ code Some < T > extends Base < T > } we will get
* { @ code TypeVariable ( T ) = = known generic ( of Base ) } ; other cases eventually leads to this one , e . g .
* { @ code Some < T > extends Base < List < T > > } ) .
* @ param type root type to track generics for
* @ param known class or interface with known generics ( in the middle of root type hierarchy )
* @ param knownGenerics generics of known type
* @ return root class generics ( row types were impossible to track )
* @ throws IllegalStateException when resolved generic of known type contradict with known generic value
* ( type can ' t be casted to known type ) */
private static LinkedHashMap < String , Type > trackGenerics ( final Class < ? > type , final Class < ? > known , final LinkedHashMap < String , Type > knownGenerics ) { } }
|
final Map < Class < ? > , LinkedHashMap < String , Type > > generics = TypeVariableUtils . trackRootVariables ( type ) ; // trace back generics ( what we can )
final Map < String , Type > tracedRootGenerics = new HashMap < String , Type > ( ) ; // required to check tracked type compatibility
final Map < String , Type > rawRootGenerics = GenericsResolutionUtils . resolveRawGenerics ( type ) ; for ( Map . Entry < String , Type > entry : generics . get ( known ) . entrySet ( ) ) { final Type actualType = entry . getValue ( ) ; final String genericName = entry . getKey ( ) ; final Type knownGenericType = knownGenerics . get ( genericName ) ; trackType ( tracedRootGenerics , rawRootGenerics , genericName , actualType , knownGenericType , type , known , knownGenerics ) ; } trackDependentVariables ( type , tracedRootGenerics ) ; // resolve all generics in correct resolution order
final Map < String , Type > tmpTypes = new HashMap < String , Type > ( tracedRootGenerics ) ; for ( TypeVariable gen : GenericsUtils . orderVariablesForResolution ( type . getTypeParameters ( ) ) ) { final String name = gen . getName ( ) ; final Type value = tracedRootGenerics . containsKey ( name ) ? tracedRootGenerics . get ( name ) // transform to wildcard to preserve possible multiple bounds declaration
// ( it will be flatten to Object if single bound declared )
: GenericsUtils . resolveTypeVariables ( gen . getBounds ( ) . length > 1 ? WildcardTypeImpl . upper ( gen . getBounds ( ) ) : gen . getBounds ( ) [ 0 ] , tmpTypes ) ; tmpTypes . put ( name , value ) ; } // finally apply correct generics order
final LinkedHashMap < String , Type > res = new LinkedHashMap < String , Type > ( ) ; for ( TypeVariable gen : type . getTypeParameters ( ) ) { res . put ( gen . getName ( ) , tmpTypes . get ( gen . getName ( ) ) ) ; } return res ;
|
public class A_CmsGroupEditor { /** * Updates the backup elements . < p >
* @ param updateElements the updated element data */
public void updateBackupElements ( Map < String , CmsContainerElementData > updateElements ) { } }
|
ArrayList < CmsContainerPageElementPanel > updatedList = new ArrayList < CmsContainerPageElementPanel > ( ) ; String containerId = m_groupContainer . getContainerId ( ) ; for ( CmsContainerPageElementPanel element : m_backUpElements ) { if ( updateElements . containsKey ( element . getId ( ) ) && CmsStringUtil . isNotEmptyOrWhitespaceOnly ( updateElements . get ( element . getId ( ) ) . getContents ( ) . get ( containerId ) ) ) { CmsContainerElementData elementData = updateElements . get ( element . getId ( ) ) ; try { CmsContainerPageElementPanel replacer = m_controller . getContainerpageUtil ( ) . createElement ( elementData , m_groupContainer , false ) ; if ( element . getInheritanceInfo ( ) != null ) { // in case of inheritance container editing , keep the inheritance info
replacer . setInheritanceInfo ( element . getInheritanceInfo ( ) ) ; } updatedList . add ( replacer ) ; } catch ( Exception e ) { // in this case keep the old version
updatedList . add ( element ) ; } } else { updatedList . add ( element ) ; } } m_backUpElements = updatedList ;
|
public class SoftHashMap { /** * Remove garbage collected soft values with the help of the reference queue . */
@ SuppressWarnings ( { } }
|
"rawtypes" , "unchecked" } ) private final void processQueue ( ) { SoftValue < V > softValue ; while ( ( softValue = ( SoftValue ) queue . poll ( ) ) != null ) { internalMap . remove ( softValue . key ) ; }
|
public class CmsImportFolder { /** * Imports the resources from a ZIP file in the real file system to the OpenCms VFS . < p >
* @ param zipStreamIn the input Stream
* @ param importPath the path in the vfs
* @ param noSubFolder if < code > true < / code > no sub folders will be created , if < code > false < / code > the content of the
* zip file is created 1:1 inclusive sub folders
* @ throws Exception if something goes wrong during file IO */
private void importZipResource ( ZipInputStream zipStreamIn , String importPath , boolean noSubFolder ) throws Exception { } }
|
// HACK : this method looks very crude , it should be re - written sometime . . .
boolean isFolder = false ; int j , r , stop , size ; int entries = 0 ; byte [ ] buffer = null ; boolean resourceExists ; while ( true ) { // handle the single entries . . .
j = 0 ; stop = 0 ; // open the entry . . .
ZipEntry entry = zipStreamIn . getNextEntry ( ) ; if ( entry == null ) { break ; } entries ++ ; // count number of entries in zip
String actImportPath = importPath ; String title = CmsResource . getName ( entry . getName ( ) ) ; String filename = m_cms . getRequestContext ( ) . getFileTranslator ( ) . translateResource ( entry . getName ( ) ) ; // separate path in directories an file name . . .
StringTokenizer st = new StringTokenizer ( filename , "/\\" ) ; int count = st . countTokens ( ) ; String [ ] path = new String [ count ] ; if ( filename . endsWith ( "\\" ) || filename . endsWith ( "/" ) ) { isFolder = true ; // last entry is a folder
} else { isFolder = false ; // last entry is a file
} while ( st . hasMoreTokens ( ) ) { // store the files and folder names in array . . .
path [ j ] = st . nextToken ( ) ; j ++ ; } stop = isFolder ? path . length : ( path . length - 1 ) ; if ( noSubFolder ) { stop = 0 ; } // now write the folders . . .
for ( r = 0 ; r < stop ; r ++ ) { try { CmsResource createdFolder = m_cms . createResource ( actImportPath + path [ r ] , CmsResourceTypeFolder . RESOURCE_TYPE_ID ) ; m_importedResources . add ( createdFolder ) ; } catch ( CmsException e ) { // of course some folders did already exist !
} actImportPath += path [ r ] ; actImportPath += "/" ; } if ( ! isFolder ) { // import file into cms
int type = OpenCms . getResourceManager ( ) . getDefaultTypeForName ( path [ path . length - 1 ] ) . getTypeId ( ) ; size = new Long ( entry . getSize ( ) ) . intValue ( ) ; if ( size == - 1 ) { buffer = CmsFileUtil . readFully ( zipStreamIn , false ) ; } else { buffer = CmsFileUtil . readFully ( zipStreamIn , size , false ) ; } filename = actImportPath + path [ path . length - 1 ] ; try { m_cms . lockResource ( filename ) ; m_cms . readResource ( filename ) ; resourceExists = true ; } catch ( CmsException e ) { resourceExists = false ; } int plainId = OpenCms . getResourceManager ( ) . getResourceType ( CmsResourceTypePlain . getStaticTypeName ( ) ) . getTypeId ( ) ; if ( resourceExists ) { CmsResource res = m_cms . readResource ( filename , CmsResourceFilter . ALL ) ; CmsFile file = m_cms . readFile ( res ) ; byte [ ] contents = file . getContents ( ) ; try { m_cms . replaceResource ( filename , res . getTypeId ( ) , buffer , new ArrayList < CmsProperty > ( 0 ) ) ; m_importedResources . add ( res ) ; } catch ( CmsSecurityException e ) { // in case of not enough permissions , try to create a plain text file
m_cms . replaceResource ( filename , plainId , buffer , new ArrayList < CmsProperty > ( 0 ) ) ; m_importedResources . add ( res ) ; } catch ( CmsDbSqlException sqlExc ) { // SQL error , probably the file is too large for the database settings , restore content
file . setContents ( contents ) ; m_cms . writeFile ( file ) ; throw sqlExc ; } } else { String newResName = actImportPath + path [ path . length - 1 ] ; if ( title . lastIndexOf ( '.' ) != - 1 ) { title = title . substring ( 0 , title . lastIndexOf ( '.' ) ) ; } List < CmsProperty > properties = new ArrayList < CmsProperty > ( 1 ) ; CmsProperty titleProp = new CmsProperty ( ) ; titleProp . setName ( CmsPropertyDefinition . PROPERTY_TITLE ) ; if ( OpenCms . getWorkplaceManager ( ) . isDefaultPropertiesOnStructure ( ) ) { titleProp . setStructureValue ( title ) ; } else { titleProp . setResourceValue ( title ) ; } properties . add ( titleProp ) ; try { m_importedResources . add ( m_cms . createResource ( newResName , type , buffer , properties ) ) ; } catch ( CmsSecurityException e ) { // in case of not enough permissions , try to create a plain text file
m_importedResources . add ( m_cms . createResource ( newResName , plainId , buffer , properties ) ) ; } catch ( CmsDbSqlException sqlExc ) { // SQL error , probably the file is too large for the database settings , delete file
m_cms . lockResource ( newResName ) ; m_cms . deleteResource ( newResName , CmsResource . DELETE_PRESERVE_SIBLINGS ) ; throw sqlExc ; } } } // close the entry . . .
zipStreamIn . closeEntry ( ) ; } zipStreamIn . close ( ) ; if ( entries > 0 ) { // at least one entry , got a valid zip file . . .
m_validZipFile = true ; }
|
public class OggFile { /** * Creates a new Logical Bit Stream in the file ,
* and returns a Writer for putting data
* into it . */
public OggPacketWriter getPacketWriter ( int sid ) { } }
|
if ( ! writing ) { throw new IllegalStateException ( "Can only write to a file opened with an OutputStream" ) ; } seenSIDs . add ( sid ) ; return new OggPacketWriter ( this , sid ) ;
|
public class MilestoneLineDisplayer { /** * Overriding the " standard " milestone behavior ( where we display something at each milestone )
* Instead , we populate a line drawer that will connect the steps */
@ Override public void draw ( Canvas pCanvas , MilestoneStep pStep ) { } }
|
if ( mFirst ) { mFirst = false ; } else { mLineDrawer . add ( pStep . getX ( ) , pStep . getY ( ) ) ; } mLineDrawer . add ( pStep . getX ( ) , pStep . getY ( ) ) ;
|
public class CollectionNamingConfusion { /** * looks for a name that mentions a collection type but the wrong type for the variable
* @ param methodOrVariableName
* the method or variable name
* @ param signature
* the variable signature
* @ return whether the name doesn ' t match the type */
@ edu . umd . cs . findbugs . annotations . SuppressFBWarnings ( value = "EXS_EXCEPTION_SOFTENING_RETURN_FALSE" , justification = "No other simple way to determine whether class exists" ) private boolean checkConfusedName ( String methodOrVariableName , String signature ) { } }
|
try { String name = methodOrVariableName . toLowerCase ( Locale . ENGLISH ) ; if ( ( name . endsWith ( "map" ) || ( name . endsWith ( "set" ) && ! name . endsWith ( "toset" ) ) || name . endsWith ( "list" ) || name . endsWith ( "queue" ) ) && signature . startsWith ( "Ljava/util/" ) ) { String clsName = SignatureUtils . stripSignature ( signature ) ; JavaClass cls = Repository . lookupClass ( clsName ) ; if ( ( cls . implementationOf ( mapInterface ) && ! name . endsWith ( "map" ) ) || ( cls . implementationOf ( setInterface ) && ! name . endsWith ( "set" ) ) || ( ( cls . implementationOf ( listInterface ) || cls . implementationOf ( queueInterface ) ) && ! name . endsWith ( "list" ) && ! name . endsWith ( "queue" ) ) ) { return true ; } } } catch ( ClassNotFoundException cnfe ) { bugReporter . reportMissingClass ( cnfe ) ; } return false ;
|
public class EasyXls { /** * 导出list对象到excel
* @ param list 导出的list
* @ param xmlPath xml完整路径
* @ param filePath 保存xls路径
* @ param fileName 保存xls文件名
* @ return 处理结果 , true成功 , false失败
* @ throws Exception */
public static boolean list2Xls ( List < ? > list , String xmlPath , String filePath , String fileName ) throws Exception { } }
|
return XlsUtil . list2Xls ( list , xmlPath , filePath , fileName ) ;
|
public class ZipUtil { /** * Creates a new { @ link ZipInputStream } based on the given { @ link InputStream } . It will be buffered and close - shielded .
* Closing the result stream flushes the buffers and frees up resources of the { @ link ZipInputStream } . However the source stream itself remains open . */
private static ZipInputStream newCloseShieldZipInputStream ( final InputStream is , Charset charset ) { } }
|
InputStream in = new BufferedInputStream ( new CloseShieldInputStream ( is ) ) ; if ( charset == null ) { return new ZipInputStream ( in ) ; } return ZipFileUtil . createZipInputStream ( in , charset ) ;
|
public class DataSink { /** * Sets the minimum and preferred resources for this data sink . and the lower and upper resource limits
* will be considered in resource resize feature for future plan .
* @ param minResources The minimum resources for this data sink .
* @ param preferredResources The preferred resources for this data sink .
* @ return The data sink with set minimum and preferred resources . */
private DataSink < T > setResources ( ResourceSpec minResources , ResourceSpec preferredResources ) { } }
|
Preconditions . checkNotNull ( minResources , "The min resources must be not null." ) ; Preconditions . checkNotNull ( preferredResources , "The preferred resources must be not null." ) ; Preconditions . checkArgument ( minResources . isValid ( ) && preferredResources . isValid ( ) && minResources . lessThanOrEqual ( preferredResources ) , "The values in resources must be not less than 0 and the preferred resources must be greater than the min resources." ) ; this . minResources = minResources ; this . preferredResources = preferredResources ; return this ;
|
public class FctBnAccEntitiesProcessors { /** * < p > Get PrcEmailMsgSave ( create and put into map ) . < / p >
* @ param pAddParam additional param
* @ return requested PrcEmailMsgSave
* @ throws Exception - an exception */
protected final PrcEmailMsgSave < RS > createPutPrcEmailMsgSave ( final Map < String , Object > pAddParam ) throws Exception { } }
|
@ SuppressWarnings ( "unchecked" ) PrcEmailMsgSave < RS > proc = ( PrcEmailMsgSave < RS > ) this . fctBnEntitiesProcessors . lazyGet ( pAddParam , PrcEmailMsgSave . class . getSimpleName ( ) ) ; // assigning fully initialized object :
this . processorsMap . put ( PrcEmailMsgSave . class . getSimpleName ( ) , proc ) ; return proc ;
|
public class DoubleArrayFunctionsND { /** * Assigns to each element of the given array the value that is provided
* by the given supplier .
* @ param a0 The array
* @ param s The supplier */
public static void set ( MutableDoubleArrayND a0 , DoubleSupplier s ) { } }
|
a0 . coordinates ( ) . parallel ( ) . forEach ( t -> { a0 . set ( t , s . getAsDouble ( ) ) ; } ) ;
|
public class MPXWriter { /** * This method is called to format a relation list .
* @ param value relation list instance
* @ return formatted relation list */
private String formatRelationList ( List < Relation > value ) { } }
|
String result = null ; if ( value != null && value . size ( ) != 0 ) { StringBuilder sb = new StringBuilder ( ) ; for ( Relation relation : value ) { if ( sb . length ( ) != 0 ) { sb . append ( m_delimiter ) ; } sb . append ( formatRelation ( relation ) ) ; } result = sb . toString ( ) ; } return ( result ) ;
|
public class PackageMojo { /** * region Generate function configurations */
protected Map < String , FunctionConfiguration > getFunctionConfigurations ( final AnnotationHandler handler , final Set < Method > methods ) throws Exception { } }
|
info ( "" ) ; info ( GENERATE_CONFIG ) ; final Map < String , FunctionConfiguration > configMap = handler . generateConfigurations ( methods ) ; if ( configMap . size ( ) == 0 ) { info ( GENERATE_SKIP ) ; } else { final String scriptFilePath = getScriptFilePath ( ) ; configMap . values ( ) . forEach ( config -> config . setScriptFile ( scriptFilePath ) ) ; info ( GENERATE_DONE ) ; } return configMap ;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.