signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class I18nRuntimeException { /** * Constructs our I18n exception message using the supplied bundle name .
* @ param aBundleName A name of a bundle in which to look up the supplied key .
* @ param aMessageKey A key value to look up in the < code > ResourceBundle < / code > .
* @ param aVarargs Additional details to use in formatting the message .
* @ return A formatted exception message */
private static String format ( final String aBundleName , final String aMessageKey , final Object ... aVarargs ) { } } | return format ( null , aBundleName , aMessageKey , aVarargs ) ; |
public class AbstractAsymmetricCrypto { /** * 编码为Base64字符串 , 使用UTF - 8编码
* @ param data 被加密的字符串
* @ param keyType 私钥或公钥 { @ link KeyType }
* @ return Base64字符串
* @ since 4.0.1 */
public String encryptBase64 ( String data , KeyType keyType ) { } } | return Base64 . encode ( encrypt ( data , keyType ) ) ; |
public class ResultDescriptor { /** * Returns the set of unique source names ; The names of the underlying samples used as the
* source of aggregations .
* @ return source names */
public Set < String > getSourceNames ( ) { } } | return Sets . newHashSet ( Iterables . transform ( getDatasources ( ) . values ( ) , new Function < Datasource , String > ( ) { @ Override public String apply ( Datasource input ) { return input . getSource ( ) ; } } ) ) ; |
public class MetadataManager { /** * Merge the given { @ link org . apache . ojb . broker . metadata . DescriptorRepository }
* files , the source objects will be pushed to the target repository . If parameter
* < tt > deep < / tt > is set < code > true < / code > deep copies of source objects were made .
* < br / >
* Note : Using < tt > deep copy mode < / tt > all descriptors will be serialized
* by using the default class loader to resolve classes . This can be problematic
* when classes are loaded by a context class loader .
* Note : All classes within the repository structure have to implement
* < code > java . io . Serializable < / code > to be able to create a cloned copy .
* @ see # isEnablePerThreadChanges
* @ see # setEnablePerThreadChanges */
public void mergeDescriptorRepository ( DescriptorRepository targetRepository , DescriptorRepository sourceRepository , boolean deep ) { } } | Iterator it = sourceRepository . iterator ( ) ; while ( it . hasNext ( ) ) { ClassDescriptor cld = ( ClassDescriptor ) it . next ( ) ; if ( deep ) { // TODO : adopt copy / clone methods for metadata classes ?
cld = ( ClassDescriptor ) SerializationUtils . clone ( cld ) ; } targetRepository . put ( cld . getClassOfObject ( ) , cld ) ; cld . setRepository ( targetRepository ) ; } |
public class AWSServiceCatalogClient { /** * Gets information about the specified constraint .
* @ param describeConstraintRequest
* @ return Result of the DescribeConstraint operation returned by the service .
* @ throws ResourceNotFoundException
* The specified resource was not found .
* @ sample AWSServiceCatalog . DescribeConstraint
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / servicecatalog - 2015-12-10 / DescribeConstraint "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public DescribeConstraintResult describeConstraint ( DescribeConstraintRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDescribeConstraint ( request ) ; |
public class ConstantFlowRegulator { /** * Wrap the given service call with the { @ link ConstantFlowRegulator }
* protection logic .
* @ param c the { @ link Callable } to attempt
* @ return whatever c would return on success
* @ throws FlowRateExceededException if the total requests per second
* through the flow regulator exceeds the configured value
* @ throws Exception if < code > c < / code > throws one during
* execution */
public < T > T invoke ( Callable < T > c ) throws Exception { } } | if ( canProceed ( ) ) { return c . call ( ) ; } else { throw mapException ( new FlowRateExceededException ( ) ) ; } |
public class CmsHistoryList { /** * Restores a backed up resource version . < p >
* @ throws CmsException if something goes wrong */
protected void performRestoreOperation ( ) throws CmsException { } } | CmsUUID structureId = new CmsUUID ( getSelectedItem ( ) . get ( LIST_COLUMN_STRUCTURE_ID ) . toString ( ) ) ; int version = Integer . parseInt ( getSelectedItems ( ) . get ( 0 ) . getId ( ) ) ; if ( version == CmsHistoryResourceHandler . PROJECT_OFFLINE_VERSION ) { // it is not possible to restore the offline version
return ; } CmsResource res = getCms ( ) . readResource ( structureId , CmsResourceFilter . IGNORE_EXPIRATION ) ; checkLock ( getCms ( ) . getSitePath ( res ) ) ; getCms ( ) . restoreResourceVersion ( res . getStructureId ( ) , version ) ; |
public class AbstractBootstrap { /** * the { @ link ChannelHandler } to use for serving the requests . */
public B handler ( ChannelHandler handler ) { } } | if ( handler == null ) { throw new NullPointerException ( "handler" ) ; } this . handler = handler ; return self ( ) ; |
public class MappingUtils { /** * Checks is value is of Primitive type
* @ param value value which would be checked
* @ return true - if value is primitive ( or it ' s wrapper ) type */
public static boolean isPrimitive ( Object value ) { } } | if ( value == null ) { return true ; } else if ( value . getClass ( ) . isPrimitive ( ) == true ) { return true ; } else if ( Integer . class . isInstance ( value ) ) { return true ; } else if ( Long . class . isInstance ( value ) ) { return true ; } else if ( Double . class . isInstance ( value ) ) { return true ; } else if ( Float . class . isInstance ( value ) ) { return true ; } else if ( Short . class . isInstance ( value ) ) { return true ; } else if ( Byte . class . isInstance ( value ) ) { return true ; } else if ( Character . class . isInstance ( value ) ) { return true ; } else if ( Boolean . class . isInstance ( value ) ) { return true ; } else if ( BigDecimal . class . isInstance ( value ) ) { return true ; } else if ( BigInteger . class . isInstance ( value ) ) { return true ; } return false ; |
public class ExceptionSet { /** * Get the least ( lowest in the lattice ) common supertype of the exceptions
* in the set . Returns the special TOP type if the set is empty . */
public Type getCommonSupertype ( ) throws ClassNotFoundException { } } | if ( commonSupertype != null ) { return commonSupertype ; } if ( isEmpty ( ) ) { // This probably means that we ' re looking at an
// infeasible exception path .
return TypeFrame . getTopType ( ) ; } // Compute first common superclass
ThrownExceptionIterator i = iterator ( ) ; ReferenceType result = i . next ( ) ; while ( i . hasNext ( ) ) { if ( Subtypes2 . ENABLE_SUBTYPES2_FOR_COMMON_SUPERCLASS_QUERIES ) { result = AnalysisContext . currentAnalysisContext ( ) . getSubtypes2 ( ) . getFirstCommonSuperclass ( result , i . next ( ) ) ; } else { result = result . getFirstCommonSuperclass ( i . next ( ) ) ; } if ( result == null ) { // This should only happen if the class hierarchy
// is incomplete . We ' ll just be conservative .
result = Type . THROWABLE ; break ; } } // Cache and return the result
commonSupertype = result ; return result ; |
public class AuthorizerDescriptionMarshaller { /** * Marshall the given parameter object . */
public void marshall ( AuthorizerDescription authorizerDescription , ProtocolMarshaller protocolMarshaller ) { } } | if ( authorizerDescription == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( authorizerDescription . getAuthorizerName ( ) , AUTHORIZERNAME_BINDING ) ; protocolMarshaller . marshall ( authorizerDescription . getAuthorizerArn ( ) , AUTHORIZERARN_BINDING ) ; protocolMarshaller . marshall ( authorizerDescription . getAuthorizerFunctionArn ( ) , AUTHORIZERFUNCTIONARN_BINDING ) ; protocolMarshaller . marshall ( authorizerDescription . getTokenKeyName ( ) , TOKENKEYNAME_BINDING ) ; protocolMarshaller . marshall ( authorizerDescription . getTokenSigningPublicKeys ( ) , TOKENSIGNINGPUBLICKEYS_BINDING ) ; protocolMarshaller . marshall ( authorizerDescription . getStatus ( ) , STATUS_BINDING ) ; protocolMarshaller . marshall ( authorizerDescription . getCreationDate ( ) , CREATIONDATE_BINDING ) ; protocolMarshaller . marshall ( authorizerDescription . getLastModifiedDate ( ) , LASTMODIFIEDDATE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class EventEngineImpl { /** * { @ inheritDoc } */
@ Override public EventHandle postEvent ( Topic topic , Map < ? , ? > properties ) { } } | return postEvent ( topic , properties , null ) ; |
public class PluginClassLoader { /** * By default , it uses a child first delegation model rather than the standard parent first .
* If the requested class cannot be found in this class loader , the parent class loader will be consulted
* via the standard { @ link ClassLoader # loadClass ( String ) } mechanism .
* Use { @ link # parentFirst } to change the loading strategy . */
@ Override public Class < ? > loadClass ( String className ) throws ClassNotFoundException { } } | synchronized ( getClassLoadingLock ( className ) ) { // first check whether it ' s a system class , delegate to the system loader
if ( className . startsWith ( JAVA_PACKAGE_PREFIX ) ) { return findSystemClass ( className ) ; } // if the class is part of the plugin engine use parent class loader
if ( className . startsWith ( PLUGIN_PACKAGE_PREFIX ) && ! className . startsWith ( "org.pf4j.demo" ) ) { // log . trace ( " Delegate the loading of PF4J class ' { } ' to parent " , className ) ;
return getParent ( ) . loadClass ( className ) ; } log . trace ( "Received request to load class '{}'" , className ) ; // second check whether it ' s already been loaded
Class < ? > loadedClass = findLoadedClass ( className ) ; if ( loadedClass != null ) { log . trace ( "Found loaded class '{}'" , className ) ; return loadedClass ; } if ( ! parentFirst ) { // nope , try to load locally
try { loadedClass = findClass ( className ) ; log . trace ( "Found class '{}' in plugin classpath" , className ) ; return loadedClass ; } catch ( ClassNotFoundException e ) { // try next step
} // look in dependencies
loadedClass = loadClassFromDependencies ( className ) ; if ( loadedClass != null ) { log . trace ( "Found class '{}' in dependencies" , className ) ; return loadedClass ; } log . trace ( "Couldn't find class '{}' in plugin classpath. Delegating to parent" , className ) ; // use the standard ClassLoader ( which follows normal parent delegation )
return super . loadClass ( className ) ; } else { // try to load from parent
try { return super . loadClass ( className ) ; } catch ( ClassCastException e ) { // try next step
} log . trace ( "Couldn't find class '{}' in parent. Delegating to plugin classpath" , className ) ; // nope , try to load locally
try { loadedClass = findClass ( className ) ; log . trace ( "Found class '{}' in plugin classpath" , className ) ; return loadedClass ; } catch ( ClassNotFoundException e ) { // try next step
} // look in dependencies
loadedClass = loadClassFromDependencies ( className ) ; if ( loadedClass != null ) { log . trace ( "Found class '{}' in dependencies" , className ) ; return loadedClass ; } throw new ClassNotFoundException ( className ) ; } } |
public class MoreCollectors { /** * Returns a { @ code Collector } which collects at most specified number of
* the first stream elements into the { @ link List } .
* This method returns a
* < a href = " package - summary . html # ShortCircuitReduction " > short - circuiting
* collector < / a > .
* There are no guarantees on the type , mutability , serializability , or
* thread - safety of the { @ code List } returned .
* The operation performed by the returned collector is equivalent to
* { @ code stream . limit ( n ) . collect ( Collectors . toList ( ) ) } . This collector is
* mostly useful as a downstream collector .
* @ param < T > the type of the input elements
* @ param n maximum number of stream elements to preserve
* @ return a collector which returns a { @ code List } containing the first n
* stream elements or less if the stream was shorter . */
public static < T > Collector < T , ? , List < T > > head ( int n ) { } } | if ( n <= 0 ) return empty ( ) ; return new CancellableCollectorImpl < > ( ArrayList :: new , ( acc , t ) -> { if ( acc . size ( ) < n ) acc . add ( t ) ; } , ( acc1 , acc2 ) -> { acc1 . addAll ( acc2 . subList ( 0 , Math . min ( acc2 . size ( ) , n - acc1 . size ( ) ) ) ) ; return acc1 ; } , Function . identity ( ) , acc -> acc . size ( ) >= n , ID_CHARACTERISTICS ) ; |
public class Unmarshaller { /** * Initializes the Embedded object represented by the given metadata .
* @ param embeddedMetadata
* the metadata of the embedded field
* @ param target
* the object in which the embedded field is declared / accessible from
* @ return the initialized object
* @ throws EntityManagerException
* if any error occurs during initialization of the embedded object */
private static Object initializeEmbedded ( EmbeddedMetadata embeddedMetadata , Object target ) { } } | try { ConstructorMetadata constructorMetadata = embeddedMetadata . getConstructorMetadata ( ) ; Object embeddedObject = null ; if ( constructorMetadata . isClassicConstructionStrategy ( ) ) { embeddedObject = embeddedMetadata . getReadMethod ( ) . invoke ( target ) ; } if ( embeddedObject == null ) { embeddedObject = constructorMetadata . getConstructorMethodHandle ( ) . invoke ( ) ; } return embeddedObject ; } catch ( Throwable t ) { throw new EntityManagerException ( t ) ; } |
public class StringUtils { /** * Normalize to canonical form .
* @ param input the input string
* @ return the normalized string */
public static String toCanonicalForm ( CharSequence input ) { } } | if ( input == null ) { return null ; } return StringFunctions . CANONICAL_NORMALIZATION . apply ( input . toString ( ) ) ; |
public class NodeSchema { /** * names are the same . Don ' t worry about the differentiator field . */
public boolean equalsOnlyNames ( NodeSchema otherSchema ) { } } | if ( otherSchema == null ) { return false ; } if ( otherSchema . size ( ) != size ( ) ) { return false ; } for ( int colIndex = 0 ; colIndex < size ( ) ; colIndex ++ ) { SchemaColumn col1 = otherSchema . getColumn ( colIndex ) ; SchemaColumn col2 = m_columns . get ( colIndex ) ; if ( col1 . compareNames ( col2 ) != 0 ) { return false ; } } return true ; |
public class CacheStatisticCollector { /** * Dumps all the cache statistic values to a { @ link StringBuilder } */
public final void dumpTo ( StringBuilder builder ) { } } | StringWriter stringWriter = new StringWriter ( ) ; globalContainer . dumpTo ( new PrintWriter ( stringWriter ) ) ; builder . append ( stringWriter . getBuffer ( ) ) ; |
public class WriterFactoryImpl { /** * { @ inheritDoc } */
public ProfilePackageSummaryWriter getProfilePackageSummaryWriter ( PackageDoc packageDoc , PackageDoc prevPkg , PackageDoc nextPkg , Profile profile ) throws Exception { } } | return new ProfilePackageWriterImpl ( configuration , packageDoc , prevPkg , nextPkg , profile ) ; |
public class DescribeCacheEngineVersionsResult { /** * A list of cache engine version details . Each element in the list contains detailed information about one cache
* engine version .
* @ param cacheEngineVersions
* A list of cache engine version details . Each element in the list contains detailed information about one
* cache engine version . */
public void setCacheEngineVersions ( java . util . Collection < CacheEngineVersion > cacheEngineVersions ) { } } | if ( cacheEngineVersions == null ) { this . cacheEngineVersions = null ; return ; } this . cacheEngineVersions = new com . amazonaws . internal . SdkInternalList < CacheEngineVersion > ( cacheEngineVersions ) ; |
public class DropwizardClusterStatsCollector { /** * protected access for testing purposes */
protected String getName ( final String key ) { } } | return MetricRegistry . name ( DropwizardClusterStatsCollector . class , "cluster" , clusterId . applicationName , clusterId . clusterName , key ) ; |
public class SVGAndroidRenderer { /** * Render dispatcher */
private void render ( SVG . SvgObject obj ) { } } | if ( obj instanceof NotDirectlyRendered ) return ; // Save state
statePush ( ) ; checkXMLSpaceAttribute ( obj ) ; if ( obj instanceof SVG . Svg ) { render ( ( SVG . Svg ) obj ) ; } else if ( obj instanceof SVG . Use ) { render ( ( SVG . Use ) obj ) ; } else if ( obj instanceof SVG . Switch ) { render ( ( SVG . Switch ) obj ) ; } else if ( obj instanceof SVG . Group ) { render ( ( SVG . Group ) obj ) ; } else if ( obj instanceof SVG . Image ) { render ( ( SVG . Image ) obj ) ; } else if ( obj instanceof SVG . Path ) { render ( ( SVG . Path ) obj ) ; } else if ( obj instanceof SVG . Rect ) { render ( ( SVG . Rect ) obj ) ; } else if ( obj instanceof SVG . Circle ) { render ( ( SVG . Circle ) obj ) ; } else if ( obj instanceof SVG . Ellipse ) { render ( ( SVG . Ellipse ) obj ) ; } else if ( obj instanceof SVG . Line ) { render ( ( SVG . Line ) obj ) ; } else if ( obj instanceof SVG . Polygon ) { render ( ( SVG . Polygon ) obj ) ; } else if ( obj instanceof SVG . PolyLine ) { render ( ( SVG . PolyLine ) obj ) ; } else if ( obj instanceof SVG . Text ) { render ( ( SVG . Text ) obj ) ; } // Restore state
statePop ( ) ; |
public class OcrClient { /** * Gets the idcard recognition properties of specific image resource .
* The caller < i > must < / i > authenticate with a valid BCE Access Key / Private Key pair .
* @ param image The image data which needs to be base64
* @ param side The side of idcard image . ( front / back )
* @ param direction Decide if the image has been rotated ( true / false )
* @ return The idcard recognition properties of the image resource */
public IdcardRecognitionResponse idcardRecognition ( String image , String side , Boolean direction ) { } } | IdcardRecognitionRequest request = new IdcardRecognitionRequest ( ) . withImage ( image ) . withSide ( side ) . withDirection ( direction ) ; return idcardRecognition ( request ) ; |
public class MessageReceiverFilterList { /** * Remove all the filters that have this as a listener .
* @ param listener Filters with this listener will be removed ( pass null to free them all ) . */
public void freeFiltersWithListener ( JMessageListener listener ) { } } | Object [ ] rgFilter = m_mapFilters . values ( ) . toArray ( ) ; for ( int i = 0 ; i < rgFilter . length ; i ++ ) { BaseMessageFilter filter = ( BaseMessageFilter ) rgFilter [ i ] ; for ( int j = 0 ; ( filter . getMessageListener ( j ) != null ) ; j ++ ) { if ( ( filter . getMessageListener ( j ) == listener ) || ( listener == null ) ) filter . free ( ) ; } } |
public class AsyncMutateInBuilder { /** * Set insertDocument to true , if the document has to be created only if it does not exist
* @ param insertDocument true to insert document . */
@ InterfaceStability . Committed public AsyncMutateInBuilder insertDocument ( boolean insertDocument ) { } } | if ( this . upsertDocument && insertDocument ) { throw new IllegalArgumentException ( "Cannot set both upsertDocument and insertDocument to true" ) ; } this . insertDocument = insertDocument ; return this ; |
public class UpgradeManagerDatanode { /** * Start distributed upgrade .
* Instantiates distributed upgrade objects .
* @ return true if distributed upgrade is required or false otherwise
* @ throws IOException */
public synchronized boolean startUpgrade ( ) throws IOException { } } | if ( upgradeState ) { // upgrade is already in progress
assert currentUpgrades != null : "UpgradeManagerDatanode.currentUpgrades is null." ; UpgradeObjectDatanode curUO = ( UpgradeObjectDatanode ) currentUpgrades . first ( ) ; curUO . startUpgrade ( ) ; return true ; } if ( broadcastCommand != null ) { if ( broadcastCommand . getVersion ( ) > this . getUpgradeVersion ( ) ) { // stop broadcasting , the cluster moved on
// start upgrade for the next version
broadcastCommand = null ; } else { // the upgrade has been finished by this data - node ,
// but the cluster is still running it ,
// reply with the broadcast command
assert currentUpgrades == null : "UpgradeManagerDatanode.currentUpgrades is not null." ; assert upgradeDaemon == null : "UpgradeManagerDatanode.upgradeDaemon is not null." ; dataNode . getNSNamenode ( namespaceId ) . processUpgradeCommand ( broadcastCommand ) ; return true ; } } if ( currentUpgrades == null ) currentUpgrades = getDistributedUpgrades ( ) ; if ( currentUpgrades == null ) { DataNode . LOG . info ( "\n Distributed upgrade for DataNode version " + getUpgradeVersion ( ) + " to current LV " + FSConstants . LAYOUT_VERSION + " cannot be started. " + "The upgrade object is not defined." ) ; return false ; } upgradeState = true ; UpgradeObjectDatanode curUO = ( UpgradeObjectDatanode ) currentUpgrades . first ( ) ; curUO . setDatanode ( dataNode ) ; curUO . startUpgrade ( ) ; upgradeDaemon = new Daemon ( curUO ) ; upgradeDaemon . start ( ) ; DataNode . LOG . info ( "\n Distributed upgrade for DataNode " + dataNode . getDatanodeInfo ( ) + " version " + getUpgradeVersion ( ) + " to current LV " + FSConstants . LAYOUT_VERSION + " is started." ) ; return true ; |
public class GraphAnalysisLoader { /** * Resolves the given class name into a { @ link TypeElement } . The class name is a binary name , but
* { @ link Elements # getTypeElement ( CharSequence ) } wants a canonical name . So this method searches
* the space of possible canonical names , starting with the most likely ( since ' $ ' is rarely used
* in canonical class names ) . */
@ VisibleForTesting static TypeElement resolveType ( Elements elements , String className ) { } } | int index = nextDollar ( className , className , 0 ) ; if ( index == - 1 ) { return getTypeElement ( elements , className ) ; } // have to test various possibilities of replacing ' $ ' with ' . ' since ' . ' in a canonical name
// of a nested type is replaced with ' $ ' in the binary name .
StringBuilder sb = new StringBuilder ( className ) ; return resolveType ( elements , className , sb , index ) ; |
public class GrouperEntityGroupStoreFactory { /** * Construction with parameters .
* @ param svcDescriptor The parameters .
* @ return The instance .
* @ throws GroupsException if there is an error
* @ see IEntityGroupStoreFactory
* # newGroupStore ( org . apereo . portal . groups . ComponentGroupServiceDescriptor ) */
@ Override public IEntityGroupStore newGroupStore ( ComponentGroupServiceDescriptor svcDescriptor ) throws GroupsException { } } | if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "Creating New Grouper IEntityGroupStore" ) ; } return getGroupStore ( ) ; |
public class TmdbEpisodes { /** * Get the primary information about a TV episode by combination of a season
* and episode number .
* @ param tvID
* @ param seasonNumber
* @ param episodeNumber
* @ param language
* @ param appendToResponse
* @ return
* @ throws MovieDbException */
public TVEpisodeInfo getEpisodeInfo ( int tvID , int seasonNumber , int episodeNumber , String language , String ... appendToResponse ) throws MovieDbException { } } | TmdbParameters parameters = new TmdbParameters ( ) ; parameters . add ( Param . ID , tvID ) ; parameters . add ( Param . SEASON_NUMBER , seasonNumber ) ; parameters . add ( Param . EPISODE_NUMBER , episodeNumber ) ; parameters . add ( Param . LANGUAGE , language ) ; parameters . add ( Param . APPEND , appendToResponse ) ; URL url = new ApiUrl ( apiKey , MethodBase . EPISODE ) . buildUrl ( parameters ) ; String webpage = httpTools . getRequest ( url ) ; try { return MAPPER . readValue ( webpage , TVEpisodeInfo . class ) ; } catch ( IOException ex ) { throw new MovieDbException ( ApiExceptionType . MAPPING_FAILED , "Failed to get TV Episode Info" , url , ex ) ; } |
public class EJBMDOrchestrator { /** * F743-506 */
private void processAutomaticTimerMetaData ( BeanMetaData bmd ) throws EJBConfigurationException , ContainerException { } } | final boolean isTraceOn = TraceComponent . isAnyTracingEnabled ( ) ; if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . entry ( tc , "processAutomaticTimerMetaData: " + bmd . j2eeName ) ; // There are several considerations for how to obtain this metadata :
// 1 . Schedule / Schedules annotations can exist on super classes , so we
// cannot exclusively rely on ASM scanning .
// 2 . Schedule / Schedules annotations can exist on private methods , and
// there is no way to unambiguously specify in XML which class in the
// hierarchy contains the method , so we cannot exclusively rely on
// the merged view .
// 3 . We must have a complete view of annotations at application start
// time so we can create those timers . However , we do not want to
// load bean classes or super - classes unless absolutely necessary .
// In order to accurately reason about annotations , we load bean classes .
// However , to avoid loading every bean class at application startup , we
// only do so if the merged view tells us we should expect to find
// automatic timers .
if ( bmd . ivInitData . ivHasScheduleTimers == null ) { if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . exit ( tc , "processAutomaticTimerMetaData: class scan data not found" ) ; // If there ' s no merged view data , then just keep going .
} else if ( ! bmd . ivInitData . ivHasScheduleTimers ) { if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . exit ( tc , "processAutomaticTimerMetaData: no class scan scheduler timers" ) ; return ; } // The following rules are used for timers :
// 1 . @ Schedule , @ Schedules , and < timer / > are all additive when applied
// to the same method .
// 2 . For non - private methods , @ Schedule and @ Schedules are only
// applicable for lowest sub - class ( ala transaction attributes ) .
// 3 . For private methods , @ Schedule and @ Schedules are applicable for
// any method in the class hierarchy .
// 4 . For < timer / > , the method is located on the lowest sub - class ,
// regardless of its visibility .
// F743-15870
// Timers can be specified in xml with three types of parm lists :
// 1 ) < method - parm > javax . ejb . Timer < / method - parm > ( ie , 1 parm )
// 2 ) < method - parm > < method - parm > ( ie , 0 parm )
// 3 ) omitting < method - params > and / or < method - parm > ( ie , unspecified parm )
// Timers with 1 parm can only map to Methods with 1 parm .
// Timers with 0 parm can only map to Methods with 0 parm .
// Timers with unspecified parms can map to Methods with 1 or 0 parm .
List < com . ibm . ws . javaee . dd . ejb . Timer > timers = getAutomaticTimersFromXML ( bmd . wccm . enterpriseBean ) ; Map < String , List < com . ibm . ws . javaee . dd . ejb . Timer > > timers0ParmByMethodName = new HashMap < String , List < com . ibm . ws . javaee . dd . ejb . Timer > > ( ) ; Map < String , List < com . ibm . ws . javaee . dd . ejb . Timer > > timers1ParmByMethodName = new HashMap < String , List < com . ibm . ws . javaee . dd . ejb . Timer > > ( ) ; Map < String , List < com . ibm . ws . javaee . dd . ejb . Timer > > timersUnspecifiedParmByMethodName = new HashMap < String , List < com . ibm . ws . javaee . dd . ejb . Timer > > ( ) ; // Add each timer specified in xml to the correct list , based on its parms .
for ( com . ibm . ws . javaee . dd . ejb . Timer timer : timers ) { NamedMethod namedMethod = timer . getTimeoutMethod ( ) ; String methodName = timer . getTimeoutMethod ( ) . getMethodName ( ) ; List < String > methodParams = namedMethod . getMethodParamList ( ) ; int parmCount = verifyXMLTimerParmList ( methodParams , // F743-15870
bmd , methodName , false ) ; addTimerToCorrectMap ( timers1ParmByMethodName , // F743-15870
timers0ParmByMethodName , timersUnspecifiedParmByMethodName , parmCount , methodName , timer ) ; } // Load the bean class .
Class < ? > beanClass = loadCustomerProvidedBeanClass ( bmd ) ; // Determine if the unspecified timers map to 0 or 1 parm Methods
if ( ! timersUnspecifiedParmByMethodName . isEmpty ( ) ) { mapUnspecifiedTimers ( timers0ParmByMethodName , timers1ParmByMethodName , timersUnspecifiedParmByMethodName , beanClass , bmd ) ; } // Process timeout metadata . If we have a timeout method , create a
// TimerMethodData with a special depth of - 1 . This will ensure it is
// sorted as the first timer method . This TimerMethodData will be
// reused below if an automatic timer also exists for the method .
processTimeoutMetaData ( bmd ) ; List < TimerMethodData > timerMethods = new ArrayList < TimerMethodData > ( ) ; Method timeoutMethod = getTimeoutMethod ( bmd ) ; if ( timeoutMethod != null ) { boolean oneParm = timeoutMethod . getParameterTypes ( ) . length == 1 ; // F743-15870
timerMethods . add ( new TimerMethodData ( timeoutMethod , - 1 , oneParm ) ) ; // F743-15780
} // Iterate over all methods declared in all classes , and build a list of
// AutomaticTimerMethod objects for this bean .
for ( MethodMap . MethodInfo methodInfo : MethodMap . getAllDeclaredMethods ( beanClass ) ) { Method method = methodInfo . getMethod ( ) ; int depth = methodInfo . getClassDepth ( ) ; String methodName = method . getName ( ) ; TimerMethodData timerMethod = null ; boolean createdTimerMethod = false ; if ( method . equals ( timeoutMethod ) ) { timerMethod = timerMethods . get ( 0 ) ; if ( isTraceOn && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "using timeout method ID for " + method ) ; } // Process XML timers . By removing the list of timers from
// the map , we are ensuring the timers apply to methods on
// the lowest sub - class only .
List < com . ibm . ws . javaee . dd . ejb . Timer > timerList ; boolean methodHas1Parm = false ; if ( hasTimeoutCallbackParameters ( methodInfo ) ) { if ( methodInfo . getNumParameters ( ) == 0 ) { timerList = timers0ParmByMethodName . remove ( methodName ) ; } else { timerList = timers1ParmByMethodName . remove ( methodName ) ; methodHas1Parm = true ; } } else { timerList = null ; } if ( timerList != null ) { if ( isTraceOn && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "processing XML timer for " + method ) ; if ( timerMethod == null ) { timerMethod = new TimerMethodData ( method , depth , methodHas1Parm ) ; // F743-15870
timerMethods . add ( timerMethod ) ; createdTimerMethod = true ; // d666251
} for ( com . ibm . ws . javaee . dd . ejb . Timer timer : timerList ) { timerMethod . addAutomaticTimer ( processAutomaticTimerFromXML ( timer ) ) ; } } // Check annoations if metadata is NOT complete , and we did NOT
// have any xml defined timer ( 0 , 1 , or undefined parms ) mapping
// to this Method
else if ( ! bmd . metadataComplete ) { // Process the @ Schedules annotation , if any .
Schedules schedulesAnnotation = method . getAnnotation ( Schedules . class ) ; if ( schedulesAnnotation != null ) { if ( isTraceOn && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "processing @Schedules for " + method ) ; for ( Schedule scheduleAnnotation : schedulesAnnotation . value ( ) ) { if ( timerMethod == null ) { timerMethod = new TimerMethodData ( method , depth , methodHas1Parm ) ; timerMethods . add ( timerMethod ) ; createdTimerMethod = true ; // d666251
} timerMethod . addAutomaticTimer ( processScheduleAnnotation ( timerMethod , method , timerMethods , scheduleAnnotation ) ) ; } } // Process the @ Schedule annotation , if any .
Schedule scheduleAnnotation = method . getAnnotation ( Schedule . class ) ; if ( scheduleAnnotation != null ) { if ( isTraceOn && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "processing @Schedule for " + method ) ; if ( timerMethod == null ) { timerMethod = new TimerMethodData ( method , depth , methodHas1Parm ) ; // F743-15870
timerMethods . add ( timerMethod ) ; createdTimerMethod = true ; // d666251
} timerMethod . addAutomaticTimer ( processScheduleAnnotation ( timerMethod , method , timerMethods , scheduleAnnotation ) ) ; } } // d666251 - Validate method signature .
if ( createdTimerMethod ) { validateTimeoutCallbackMethod ( bmd , methodInfo ) ; } } // Diagnose any unsatisfied automatic timers from XML .
dealWithUnsatisifedXMLTimers ( timers1ParmByMethodName , // F743-15870
timers0ParmByMethodName , bmd ) ; if ( ! timerMethods . isEmpty ( ) ) { if ( bmd . type == InternalConstants . TYPE_STATEFUL_SESSION || bmd . type == InternalConstants . TYPE_MANAGED_BEAN ) { Tr . error ( tc , "AUTOMATIC_TIMER_ON_STATEFUL_SESSION_CNTR0207E" , new Object [ ] { bmd . j2eeName . getComponent ( ) , bmd . j2eeName . getModule ( ) } ) ; throw new EJBConfigurationException ( "CNTR0207E: The " + bmd . j2eeName . getComponent ( ) + " stateful session bean in the " + bmd . j2eeName . getModule ( ) + " module has an automatic timer." ) ; } // Sort the timer methods , and then assign method IDs . We must sort
// methods so that method IDs remain stable for persistent timers even
// if the JVM changes the order of methods returned by getMethods ( ) .
Collections . sort ( timerMethods ) ; int methodId = 0 ; for ( TimerMethodData timerMethod : timerMethods ) { timerMethod . ivMethodId = methodId ++ ; } // Add the timer methods to module metadata so they will be created
// once all the automatic timers for the module have been processed .
bmd . _moduleMetaData . addAutomaticTimerBean ( new AutomaticTimerBean ( bmd , timerMethods ) ) ; // Check now if the runtime environment supports timers
// rather than waiting until the automatic timers are created .
bmd . container . getEJBRuntime ( ) . setupTimers ( bmd ) ; } // Always insert so finishBMDInit does not do extra work by calling
// processTimeoutMetaData during deferred EJB initialization .
bmd . ivInitData . ivTimerMethods = timerMethods ; if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . exit ( tc , "processAutomaticTimerMetaData: " + timerMethods . size ( ) ) ; |
public class DnsNameResolver { /** * Sends a DNS query with the specified question with additional records . */
public Future < AddressedEnvelope < DnsResponse , InetSocketAddress > > query ( DnsQuestion question , Iterable < DnsRecord > additionals ) { } } | return query ( nextNameServerAddress ( ) , question , additionals ) ; |
public class ExecutionEntity { /** * scopes / / / / / */
@ Override @ SuppressWarnings ( "unchecked" ) public void initialize ( ) { } } | LOG . initializeExecution ( this ) ; ScopeImpl scope = getScopeActivity ( ) ; ensureParentInitialized ( ) ; List < VariableDeclaration > variableDeclarations = ( List < VariableDeclaration > ) scope . getProperty ( BpmnParse . PROPERTYNAME_VARIABLE_DECLARATIONS ) ; if ( variableDeclarations != null ) { for ( VariableDeclaration variableDeclaration : variableDeclarations ) { variableDeclaration . initialize ( this , parent ) ; } } if ( isProcessInstanceExecution ( ) ) { String initiatorVariableName = ( String ) processDefinition . getProperty ( BpmnParse . PROPERTYNAME_INITIATOR_VARIABLE_NAME ) ; if ( initiatorVariableName != null ) { String authenticatedUserId = Context . getCommandContext ( ) . getAuthenticatedUserId ( ) ; setVariable ( initiatorVariableName , authenticatedUserId ) ; } } // create event subscriptions for the current scope
for ( EventSubscriptionDeclaration declaration : EventSubscriptionDeclaration . getDeclarationsForScope ( scope ) . values ( ) ) { if ( ! declaration . isStartEvent ( ) ) { declaration . createSubscriptionForExecution ( this ) ; } } |
public class SQLiteConnectionPool { /** * Might throw . */
private SQLiteConnection waitForConnection ( String sql , int connectionFlags , CancellationSignal cancellationSignal ) { } } | final boolean wantPrimaryConnection = ( connectionFlags & CONNECTION_FLAG_PRIMARY_CONNECTION_AFFINITY ) != 0 ; final ConnectionWaiter waiter ; final int nonce ; synchronized ( mLock ) { throwIfClosedLocked ( ) ; // Abort if canceled .
if ( cancellationSignal != null ) { cancellationSignal . throwIfCanceled ( ) ; } // Try to acquire a connection .
SQLiteConnection connection = null ; if ( ! wantPrimaryConnection ) { connection = tryAcquireNonPrimaryConnectionLocked ( sql , connectionFlags ) ; // might throw
} if ( connection == null ) { connection = tryAcquirePrimaryConnectionLocked ( connectionFlags ) ; // might throw
} if ( connection != null ) { return connection ; } // No connections available . Enqueue a waiter in priority order .
final int priority = getPriority ( connectionFlags ) ; final long startTime = SystemClock . uptimeMillis ( ) ; waiter = obtainConnectionWaiterLocked ( Thread . currentThread ( ) , startTime , priority , wantPrimaryConnection , sql , connectionFlags ) ; ConnectionWaiter predecessor = null ; ConnectionWaiter successor = mConnectionWaiterQueue ; while ( successor != null ) { if ( priority > successor . mPriority ) { waiter . mNext = successor ; break ; } predecessor = successor ; successor = successor . mNext ; } if ( predecessor != null ) { predecessor . mNext = waiter ; } else { mConnectionWaiterQueue = waiter ; } nonce = waiter . mNonce ; } // Set up the cancellation listener .
if ( cancellationSignal != null ) { cancellationSignal . setOnCancelListener ( new CancellationSignal . OnCancelListener ( ) { @ Override public void onCancel ( ) { synchronized ( mLock ) { if ( waiter . mNonce == nonce ) { cancelConnectionWaiterLocked ( waiter ) ; } } } } ) ; } try { // Park the thread until a connection is assigned or the pool is closed .
// Rethrow an exception from the wait , if we got one .
long busyTimeoutMillis = CONNECTION_POOL_BUSY_MILLIS ; long nextBusyTimeoutTime = waiter . mStartTime + busyTimeoutMillis ; for ( ; ; ) { // Detect and recover from connection leaks .
if ( mConnectionLeaked . compareAndSet ( true , false ) ) { synchronized ( mLock ) { wakeConnectionWaitersLocked ( ) ; } } // Wait to be unparked ( may already have happened ) , a timeout , or interruption .
LockSupport . parkNanos ( this , busyTimeoutMillis * 1000000L ) ; // Clear the interrupted flag , just in case .
Thread . interrupted ( ) ; // Check whether we are done waiting yet .
synchronized ( mLock ) { throwIfClosedLocked ( ) ; final SQLiteConnection connection = waiter . mAssignedConnection ; final RuntimeException ex = waiter . mException ; if ( connection != null || ex != null ) { recycleConnectionWaiterLocked ( waiter ) ; if ( connection != null ) { return connection ; } throw ex ; // rethrow !
} final long now = SystemClock . uptimeMillis ( ) ; if ( now < nextBusyTimeoutTime ) { busyTimeoutMillis = now - nextBusyTimeoutTime ; } else { logConnectionPoolBusyLocked ( now - waiter . mStartTime , connectionFlags ) ; busyTimeoutMillis = CONNECTION_POOL_BUSY_MILLIS ; nextBusyTimeoutTime = now + busyTimeoutMillis ; } } } } finally { // Remove the cancellation listener .
if ( cancellationSignal != null ) { cancellationSignal . setOnCancelListener ( null ) ; } } |
public class VertxServerHttpExchange { /** * { @ link HttpServerRequest } is available . */
@ Override public < T > T unwrap ( Class < T > clazz ) { } } | return HttpServerRequest . class . isAssignableFrom ( clazz ) ? clazz . cast ( request ) : null ; |
public class AbstractManagedServiceFactory { protected static String getProperty ( String name , Dictionary < String , ? > configProperties ) throws ConfigurationException { } } | return getProperty ( name , true , configProperties ) ; |
public class JDBCResourceMBeanImpl { /** * ( non - Javadoc )
* @ see com . ibm . websphere . management . j2ee . JDBCResourceMBean # getjdbcDataSources ( ) */
@ Override public String [ ] getjdbcDataSources ( ) { } } | final String methodName = "getjdbcDataSources()" ; final boolean trace = TraceComponent . isAnyTracingEnabled ( ) ; if ( trace && tc . isEntryEnabled ( ) ) Tr . entry ( tc , methodName , this ) ; final Collection < JDBCDataSourceMBeanImpl > c = dataSourceMBeanChildrenList . values ( ) ; final int size = c . size ( ) ; final String [ ] result = new String [ size ] ; int index = 0 ; for ( JDBCDataSourceMBeanImpl mbeanItem : c ) result [ index ++ ] = mbeanItem . getobjectName ( ) ; if ( trace && tc . isEntryEnabled ( ) ) Tr . exit ( tc , methodName , this ) ; return result ; // return dataSourcesList . toArray ( new String [ dataSourcesList . size ( ) ] ) ; |
public class QualitygatesService { /** * This is part of the internal API .
* This is a POST request .
* @ see < a href = " https : / / next . sonarqube . com / sonarqube / web _ api / api / qualitygates / destroy " > Further information about this action online ( including a response example ) < / a >
* @ since 4.3 */
public void destroy ( DestroyRequest request ) { } } | call ( new PostRequest ( path ( "destroy" ) ) . setParam ( "id" , request . getId ( ) ) . setParam ( "organization" , request . getOrganization ( ) ) . setMediaType ( MediaTypes . JSON ) ) . content ( ) ; |
public class ResourceLoader { /** * Load the global ConformanceConfig */
public static ConformanceConfig loadGlobalConformance ( Class < ? > clazz ) { } } | ConformanceConfig . Builder builder = ConformanceConfig . newBuilder ( ) ; if ( resourceExists ( clazz , "global_conformance.binarypb" ) ) { try { builder . mergeFrom ( clazz . getResourceAsStream ( "global_conformance.binarypb" ) ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; } } return builder . build ( ) ; |
public class WindowUtil { /** * Returns the vertical scroll position needed to place the specified target widget in view ,
* while trying to minimize scrolling . */
public static int getScrollIntoView ( Widget target ) { } } | int top = Window . getScrollTop ( ) , height = Window . getClientHeight ( ) ; int ttop = target . getAbsoluteTop ( ) , theight = target . getOffsetHeight ( ) ; // if the target widget is taller than the browser window , or is above the current scroll
// position of the browser window , scroll the top of the widget to the top of the window
if ( theight > height || ttop < top ) { return ttop ; // otherwise scroll the bottom of the widget to the bottom of the window
} else if ( ttop + theight > top + height ) { return ttop - ( height - theight ) ; } else { return top ; // no scrolling needed
} |
public class MmtfUtils { /** * Count the total number of groups in the structure
* @ param structure the input structure
* @ return the total number of groups */
public static int getNumGroups ( Structure structure ) { } } | int count = 0 ; for ( int i = 0 ; i < structure . nrModels ( ) ; i ++ ) { for ( Chain chain : structure . getChains ( i ) ) { count += chain . getAtomGroups ( ) . size ( ) ; } } return count ; |
public class AstaDatabaseFileReader { /** * Extract a list of time entries .
* @ param shiftData string representation of time entries
* @ return list of time entry rows */
private List < Row > createTimeEntryRowList ( String shiftData ) throws ParseException { } } | List < Row > list = new ArrayList < Row > ( ) ; String [ ] shifts = shiftData . split ( ",|:" ) ; int index = 1 ; while ( index < shifts . length ) { index += 2 ; int entryCount = Integer . parseInt ( shifts [ index ] ) ; index ++ ; for ( int entryIndex = 0 ; entryIndex < entryCount ; entryIndex ++ ) { Integer exceptionTypeID = Integer . valueOf ( shifts [ index + 0 ] ) ; Date startTime = DatatypeConverter . parseBasicTime ( shifts [ index + 1 ] ) ; Date endTime = DatatypeConverter . parseBasicTime ( shifts [ index + 2 ] ) ; Map < String , Object > map = new HashMap < String , Object > ( ) ; map . put ( "START_TIME" , startTime ) ; map . put ( "END_TIME" , endTime ) ; map . put ( "EXCEPTIOP" , exceptionTypeID ) ; list . add ( new MapRow ( map ) ) ; index += 3 ; } } return list ; |
public class FormBuilder { /** * Append a field to the form .
* @ param key The form key , which must be URLEncoded before calling this method .
* @ param value The value associated with the key . The value will be URLEncoded by this method . */
public void appendField ( final String key , final String value ) { } } | if ( builder . length ( ) > 0 ) { builder . append ( "&" ) ; } try { builder . append ( key ) . append ( "=" ) . append ( URLEncoder . encode ( value , "UTF-8" ) ) ; } catch ( UnsupportedEncodingException e ) { throw new AssertionError ( "UTF-8 encoding should always be available." ) ; } |
public class FileUtil { /** * 读取文件的每行内容到List < String > .
* @ see { @ link Files # readAllLines } */
public static List < String > toLines ( final File file ) throws IOException { } } | return Files . readAllLines ( file . toPath ( ) , Charsets . UTF_8 ) ; |
public class ConfigurationValidation { /** * Check configuration errors */
public List < String > getConfigurationErrors ( boolean projectPerFolder , String configProjectToken , String configProjectName , String configApiToken , String configFilePath , int archiveDepth , String [ ] includes , String [ ] projectPerFolderIncludes , String [ ] pythonIncludes , String scanComment ) { } } | List < String > errors = new ArrayList < > ( ) ; String [ ] requirements = pythonIncludes [ Constants . ZERO ] . split ( Constants . WHITESPACE ) ; if ( StringUtils . isBlank ( configApiToken ) ) { String error = "Could not retrieve " + ORG_TOKEN_PROPERTY_KEY + " property from " + configFilePath ; errors . add ( error ) ; } boolean noProjectToken = StringUtils . isBlank ( configProjectToken ) ; boolean noProjectName = StringUtils . isBlank ( configProjectName ) ; if ( noProjectToken && noProjectName && ! projectPerFolder ) { String error = "Could not retrieve properties " + PROJECT_NAME_PROPERTY_KEY + " and " + PROJECT_TOKEN_PROPERTY_KEY + " from " + configFilePath ; errors . add ( error ) ; } else if ( ! noProjectToken && ! noProjectName ) { String error = "Please choose just one of either " + PROJECT_NAME_PROPERTY_KEY + " or " + PROJECT_TOKEN_PROPERTY_KEY + " (and not both)" ; errors . add ( error ) ; } if ( archiveDepth < Constants . ZERO || archiveDepth > Constants . MAX_EXTRACTION_DEPTH ) { errors . add ( "Error: archiveExtractionDepth value should be greater than 0 and less than " + Constants . MAX_EXTRACTION_DEPTH ) ; } if ( includes . length < Constants . ONE || StringUtils . isBlank ( includes [ Constants . ZERO ] ) ) { errors . add ( "Error: includes parameter must have at list one scanning pattern" ) ; } if ( projectPerFolder && projectPerFolderIncludes == null ) { errors . add ( "projectPerFolderIncludes parameter is empty, specify folders to include or mark as comment to scan all folders" ) ; } if ( requirements . length > Constants . ZERO ) { for ( String requirement : requirements ) { if ( ! requirement . endsWith ( Constants . 
TXT_EXTENSION ) ) { String error = "Invalid file name: " + requirement + Constants . WHITESPACE + "in property" + PYTHON_REQUIREMENTS_FILE_INCLUDES + "from " + configFilePath ; errors . add ( error ) ; } } } // get user comment & check max valid size
if ( ! StringUtils . isBlank ( scanComment ) ) { if ( scanComment . length ( ) > Constants . COMMENT_MAX_LENGTH ) { errors . add ( "Error: scanComment parameters is longer than 1000 characters" ) ; } } return errors ; |
public class Solo { /** * Get the location of a view on the screen
* @ param view - string reference to the view
* @ return - int array ( x , y ) of the view location
* @ throws Exception */
public static int [ ] getLocationOnScreen ( String view ) throws Exception { } } | int [ ] location = new int [ 2 ] ; JSONArray results = Client . getInstance ( ) . map ( Constants . ROBOTIUM_SOLO , "getLocationOnScreen" , view ) ; location [ 0 ] = results . getInt ( 0 ) ; location [ 1 ] = results . getInt ( 1 ) ; return location ; |
public class RetryableResource { /** * Recover exchange bindings using the { @ code channelSupplier } . */
void recoverExchangeBindings ( Iterable < Binding > exchangeBindings ) throws Exception { } } | if ( exchangeBindings != null ) synchronized ( exchangeBindings ) { for ( Binding binding : exchangeBindings ) try { log . info ( "Recovering exchange binding from {} to {} with {} via {}" , binding . source , binding . destination , binding . routingKey , this ) ; getRecoveryChannel ( ) . exchangeBind ( binding . destination , binding . source , binding . routingKey , binding . arguments ) ; } catch ( Exception e ) { log . error ( "Failed to recover exchange binding from {} to {} with {} via {}" , binding . source , binding . destination , binding . routingKey , this , e ) ; if ( throwOnRecoveryFailure ( ) || Exceptions . isCausedByConnectionClosure ( e ) ) throw e ; } } |
public class ServiceRefTypeImpl { /** * If not already created , a new < code > handler - chains < / code > element with the given value will be created .
* Otherwise , the existing < code > handler - chains < / code > element will be returned .
* @ return a new or existing instance of < code > HandlerChainsType < ServiceRefType < T > > < / code > */
public HandlerChainsType < ServiceRefType < T > > getOrCreateHandlerChains ( ) { } } | Node node = childNode . getOrCreate ( "handler-chains" ) ; HandlerChainsType < ServiceRefType < T > > handlerChains = new HandlerChainsTypeImpl < ServiceRefType < T > > ( this , "handler-chains" , childNode , node ) ; return handlerChains ; |
public class SessionAttributeInitializingFilter { /** * Puts all pre - configured attributes into the actual session attribute
* map and forward the event to the next filter . */
@ Override public void sessionCreated ( NextFilter nextFilter , IoSession session ) throws Exception { } } | for ( Map . Entry < String , Object > e : attributes . entrySet ( ) ) { session . setAttribute ( e . getKey ( ) , e . getValue ( ) ) ; } nextFilter . sessionCreated ( session ) ; |
public class Insert { /** * { @ inheritDoc } */
@ Override public void execute ( ) throws EFapsException { } } | final boolean hasAccess = getType ( ) . hasAccess ( Instance . get ( getType ( ) , 0 ) , AccessTypeEnums . CREATE . getAccessType ( ) , getNewValuesMap ( ) ) ; if ( ! hasAccess ) { Insert . LOG . error ( "Insert not permitted for Person: {} on Type: {}" , Context . getThreadContext ( ) . getPerson ( ) , getType ( ) ) ; throw new EFapsException ( getClass ( ) , "execute.NoAccess" , getType ( ) ) ; } executeWithoutAccessCheck ( ) ; |
public class SignerWithChooserByPrivateKeyIdImpl { /** * Signs a message .
* @ param privateKeyId the logical name of the private key as configured in
* the private key map
* @ param message the message to sign
* @ return the signature
* @ see # setPrivateKeyMap ( java . util . Map ) */
public byte [ ] sign ( String privateKeyId , byte [ ] message ) { } } | Signer signer = cache . get ( privateKeyId ) ; if ( signer != null ) { return signer . sign ( message ) ; } SignerImpl signerImpl = new SignerImpl ( ) ; PrivateKey privateKey = privateKeyMap . get ( privateKeyId ) ; if ( privateKey == null ) { throw new SignatureException ( "private key not found: privateKeyId=" + privateKeyId ) ; } signerImpl . setPrivateKey ( privateKey ) ; signerImpl . setAlgorithm ( algorithm ) ; signerImpl . setProvider ( provider ) ; cache . put ( privateKeyId , signerImpl ) ; return signerImpl . sign ( message ) ; |
public class CmsDateBox { /** * Validates the time and prints out an error message if the time format is incorrect . < p > */
private void checkTime ( ) { } } | if ( ! isValidTime ( ) ) { m_time . setErrorMessageWidth ( ( m_popup . getOffsetWidth ( ) - 32 ) + Unit . PX . toString ( ) ) ; m_time . setErrorMessage ( Messages . get ( ) . key ( Messages . ERR_DATEBOX_INVALID_TIME_FORMAT_0 ) ) ; } else { m_time . setErrorMessage ( null ) ; } updateCloseBehavior ( ) ; |
public class service { /** * Use this API to add service resources . */
public static base_responses add ( nitro_service client , service resources [ ] ) throws Exception { } } | base_responses result = null ; if ( resources != null && resources . length > 0 ) { service addresources [ ] = new service [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { addresources [ i ] = new service ( ) ; addresources [ i ] . name = resources [ i ] . name ; addresources [ i ] . ip = resources [ i ] . ip ; addresources [ i ] . servername = resources [ i ] . servername ; addresources [ i ] . servicetype = resources [ i ] . servicetype ; addresources [ i ] . port = resources [ i ] . port ; addresources [ i ] . cleartextport = resources [ i ] . cleartextport ; addresources [ i ] . cachetype = resources [ i ] . cachetype ; addresources [ i ] . maxclient = resources [ i ] . maxclient ; addresources [ i ] . healthmonitor = resources [ i ] . healthmonitor ; addresources [ i ] . maxreq = resources [ i ] . maxreq ; addresources [ i ] . cacheable = resources [ i ] . cacheable ; addresources [ i ] . cip = resources [ i ] . cip ; addresources [ i ] . cipheader = resources [ i ] . cipheader ; addresources [ i ] . usip = resources [ i ] . usip ; addresources [ i ] . pathmonitor = resources [ i ] . pathmonitor ; addresources [ i ] . pathmonitorindv = resources [ i ] . pathmonitorindv ; addresources [ i ] . useproxyport = resources [ i ] . useproxyport ; addresources [ i ] . sc = resources [ i ] . sc ; addresources [ i ] . sp = resources [ i ] . sp ; addresources [ i ] . rtspsessionidremap = resources [ i ] . rtspsessionidremap ; addresources [ i ] . clttimeout = resources [ i ] . clttimeout ; addresources [ i ] . svrtimeout = resources [ i ] . svrtimeout ; addresources [ i ] . customserverid = resources [ i ] . customserverid ; addresources [ i ] . serverid = resources [ i ] . serverid ; addresources [ i ] . cka = resources [ i ] . cka ; addresources [ i ] . tcpb = resources [ i ] . tcpb ; addresources [ i ] . cmp = resources [ i ] . 
cmp ; addresources [ i ] . maxbandwidth = resources [ i ] . maxbandwidth ; addresources [ i ] . accessdown = resources [ i ] . accessdown ; addresources [ i ] . monthreshold = resources [ i ] . monthreshold ; addresources [ i ] . state = resources [ i ] . state ; addresources [ i ] . downstateflush = resources [ i ] . downstateflush ; addresources [ i ] . tcpprofilename = resources [ i ] . tcpprofilename ; addresources [ i ] . httpprofilename = resources [ i ] . httpprofilename ; addresources [ i ] . hashid = resources [ i ] . hashid ; addresources [ i ] . comment = resources [ i ] . comment ; addresources [ i ] . appflowlog = resources [ i ] . appflowlog ; addresources [ i ] . netprofile = resources [ i ] . netprofile ; addresources [ i ] . td = resources [ i ] . td ; } result = add_bulk_request ( client , addresources ) ; } return result ; |
public class OjbTagsHandler { /** * Sets the current collection definition derived from the current member , and optionally some attributes .
* @ param template The template
* @ param attributes The attributes of the tag
* @ exception XDocletException If an error occurs
* @ doc . tag type = " block "
* @ doc . param name = " attributes " optional = " true " description = " Attributes of the collection as name - value pairs ' name = value ' ,
* separated by commas "
* @ doc . param name = " auto - delete " optional = " true " description = " Whether to automatically delete the
* collection on object deletion "
* @ doc . param name = " auto - retrieve " optional = " true " description = " Whether to automatically retrieve
* the collection "
* @ doc . param name = " auto - update " optional = " true " description = " Whether to automatically update the
* collection "
* @ doc . param name = " collection - class " optional = " true " description = " The type of the collection if not a
* java . util type or an array "
* @ doc . param name = " database - foreignkey " optional = " true " description = " Whether a database foreignkey shall be created "
* values = " true , false "
* @ doc . param name = " documentation " optional = " true " description = " Documentation on the collection "
* @ doc . param name = " element - class - ref " optional = " true " description = " The fully qualified name of
* the element type "
* @ doc . param name = " foreignkey " optional = " true " description = " The name of the
* foreign keys ( columns when an indirection table is given ) "
* @ doc . param name = " foreignkey - documentation " optional = " true " description = " Documentation
* on the foreign keys as a comma - separated list if using an indirection table "
* @ doc . param name = " indirection - table " optional = " true " description = " The name of the indirection
* table for m : n associations "
* @ doc . param name = " indirection - table - documentation " optional = " true " description = " Documentation
* on the indirection table "
* @ doc . param name = " indirection - table - primarykeys " optional = " true " description = " Whether the
* fields referencing the collection and element classes , should also be primarykeys "
* @ doc . param name = " otm - dependent " optional = " true " description = " Whether the collection is dependent on otm "
* @ doc . param name = " proxy " optional = " true " description = " Whether to use a proxy for the collection "
* @ doc . param name = " proxy - prefetching - limit " optional = " true " description = " Specifies the amount of objects to prefetch "
* @ doc . param name = " query - customizer " optional = " true " description = " The query customizer for this collection "
* @ doc . param name = " query - customizer - attributes " optional = " true " description = " Attributes for the query customizer "
* @ doc . param name = " refresh " optional = " true " description = " Whether to automatically refresh the
* collection "
* @ doc . param name = " remote - foreignkey " optional = " true " description = " The name of the
* foreign key columns pointing to the elements if using an indirection table "
* @ doc . param name = " remote - foreignkey - documentation " optional = " true " description = " Documentation
* on the remote foreign keys as a comma - separated list if using an indirection table " */
public void processCollection ( String template , Properties attributes ) throws XDocletException { } } | String name = OjbMemberTagsHandler . getMemberName ( ) ; CollectionDescriptorDef collDef = _curClassDef . getCollection ( name ) ; String attrName ; if ( collDef == null ) { collDef = new CollectionDescriptorDef ( name ) ; _curClassDef . addCollection ( collDef ) ; } LogHelper . debug ( false , OjbTagsHandler . class , "processCollection" , " Processing collection " + collDef . getName ( ) ) ; for ( Enumeration attrNames = attributes . propertyNames ( ) ; attrNames . hasMoreElements ( ) ; ) { attrName = ( String ) attrNames . nextElement ( ) ; collDef . setProperty ( attrName , attributes . getProperty ( attrName ) ) ; } if ( OjbMemberTagsHandler . getMemberDimension ( ) > 0 ) { // we store the array - element type for later use
collDef . setProperty ( PropertyHelper . OJB_PROPERTY_ARRAY_ELEMENT_CLASS_REF , OjbMemberTagsHandler . getMemberType ( ) . getQualifiedName ( ) ) ; } else { collDef . setProperty ( PropertyHelper . OJB_PROPERTY_VARIABLE_TYPE , OjbMemberTagsHandler . getMemberType ( ) . getQualifiedName ( ) ) ; } _curCollectionDef = collDef ; generate ( template ) ; _curCollectionDef = null ; |
public class IpcLogEntry { /** * Set the exception that was thrown while trying to execute the request . This will be
* logged and can be used to fill in the error reason . */
public IpcLogEntry withException ( Throwable exception ) { } } | this . exception = exception ; if ( statusDetail == null ) { statusDetail = exception . getClass ( ) . getSimpleName ( ) ; } if ( status == null ) { status = IpcStatus . forException ( exception ) ; } return this ; |
public class Expressions { /** * Get the intersection of the given Boolean expressions
* @ param exprs predicates
* @ return intersection of predicates */
public static BooleanExpression allOf ( BooleanExpression ... exprs ) { } } | BooleanExpression rv = null ; for ( BooleanExpression b : exprs ) { rv = rv == null ? b : rv . and ( b ) ; } return rv ; |
public class HttpMethodBase { /** * Gets the response header associated with the given name . Header name
* matching is case insensitive . < tt > null < / tt > will be returned if either
* < i > headerName < / i > is < tt > null < / tt > or there is no matching header for
* < i > headerName < / i > .
* @ param headerName the header name to match
* @ return the matching header */
@ Override public Header getResponseHeader ( String headerName ) { } } | if ( headerName == null ) { return null ; } else { return getResponseHeaderGroup ( ) . getCondensedHeader ( headerName ) ; } |
public class TypeHandlerUtils { /** * Converts array of Object into sql . Array
* @ param conn connection for which sql . Array object would be created
* @ param array array of objects
* @ return sql . Array from array of Object
* @ throws SQLException */
public static Object convertArray ( Connection conn , Object [ ] array ) throws SQLException { } } | Object result = null ; result = createArrayOf ( conn , convertJavaClassToSqlType ( array . getClass ( ) . getComponentType ( ) . getSimpleName ( ) ) , array ) ; return result ; |
public class JaxWsDDHelper { /** * Get the PortComponent by ejb - link .
* @ param ejbLink
* @ param containerToAdapt
* @ return
* @ throws UnableToAdaptException */
static PortComponent getPortComponentByEJBLink ( String ejbLink , Adaptable containerToAdapt ) throws UnableToAdaptException { } } | return getHighLevelElementByServiceImplBean ( ejbLink , containerToAdapt , PortComponent . class , LinkType . EJB ) ; |
public class FileUtil { /** * Check if the file is well formatted regarding an extension prefix .
* Check also if the file doesn ' t exist .
* @ param file
* @ param prefix
* @ return
* @ throws SQLException
* @ throws java . io . FileNotFoundException */
public static boolean isFileImportable ( File file , String prefix ) throws SQLException , FileNotFoundException { } } | if ( isExtensionWellFormated ( file , prefix ) ) { if ( file . exists ( ) ) { return true ; } else { throw new FileNotFoundException ( "The following file does not exists:\n" + file . getPath ( ) ) ; } } else { throw new SQLException ( "Please use " + prefix + " extension." ) ; } |
public class NormalizedWord2VecModel { /** * Normalizes the vectors in this model */
private void normalize ( ) { } } | for ( int i = 0 ; i < vocab . size ( ) ; ++ i ) { double len = 0 ; for ( int j = i * layerSize ; j < ( i + 1 ) * layerSize ; ++ j ) len += vectors . get ( j ) * vectors . get ( j ) ; len = Math . sqrt ( len ) ; for ( int j = i * layerSize ; j < ( i + 1 ) * layerSize ; ++ j ) vectors . put ( j , vectors . get ( j ) / len ) ; } |
public class Main { /** * Processes listed classes given a JDK 9 home . */
boolean processJdk9 ( String jdkHome , Collection < String > classes ) throws IOException { } } | systemModules . add ( new File ( jdkHome ) ) ; return doClassNames ( classes ) ; |
public class QueryParametersLazyList { /** * Silently closes supplied result set
* @ param rs result set which should be closed */
private void closeResultSet ( ResultSet rs ) { } } | if ( closedResultSet . contains ( rs ) == false ) { MjdbcUtils . closeQuietly ( rs ) ; closedResultSet . add ( rs ) ; } |
public class RedisStrHashMap { /** * 查看缓存hash是否包含某个key
* @ param field
* @ return */
public boolean containsKey ( String field ) { } } | try { return getJedisCommands ( groupName ) . hexists ( key , field ) ; } finally { getJedisProvider ( groupName ) . release ( ) ; } |
public class Predicate { /** * Complete AND operation . Similar to { @ link # AND } but no
* short - circuit : in all situations , < code > a < / code > is evaluated
* and next < code > b < / code > is evaluated . Good for impure
* predicates . */
public static < T > Predicate < T > FULL_AND ( final Predicate < T > a , final Predicate < T > b ) { } } | return new Predicate < T > ( ) { public boolean check ( T obj ) { return a . check ( obj ) & b . check ( obj ) ; } } ; |
public class CmsHelpNavigationListView { /** * Returns a String of spaces . < p >
* @ param n the count of spaces
* @ return a String of spaces */
private static String getSpaces ( int n ) { } } | // avoid negative NegativeArraySizeException in case uri is missing
n = Math . max ( n , 0 ) ; StringBuffer result = new StringBuffer ( n ) ; for ( ; n > 0 ; n -- ) { result . append ( ' ' ) ; } return result . toString ( ) ; |
public class LastSplitsCallback { /** * When a Splits is stopped , it is added to the stopwatch a Last Splits attribute . */
@ Override public void onStopwatchStop ( Split split , StopwatchSample sample ) { } } | LastSplits lastSplits = getLastSplits ( split . getStopwatch ( ) ) ; lastSplits . add ( split ) ; lastSplits . log ( split ) ; |
public class FunctionTypeBuilder { /** * Infers the type of { @ code this } .
* @ param info The JSDocInfo for this function . */
FunctionTypeBuilder inferThisType ( JSDocInfo info ) { } } | if ( info != null && info . hasThisType ( ) ) { // TODO ( johnlenz ) : In ES5 strict mode a function can have a null or
// undefined " this " value , but all the existing " @ this " annotations
// don ' t declare restricted types .
JSType maybeThisType = info . getThisType ( ) . evaluate ( templateScope , typeRegistry ) . restrictByNotNullOrUndefined ( ) ; if ( maybeThisType != null ) { thisType = maybeThisType ; } } return this ; |
public class Stripe {
    /**
     * Blocking method to create a {@link Token} for a Connect Account. Do not call this on the UI
     * thread or your app will crash. This overload uses the currently set
     * {@link #mDefaultPublishableKey}.
     *
     * @param accountParams params to use for this token.
     * @return a {@link Token} that can be used for this account.
     * @throws AuthenticationException failure to properly authenticate yourself (check your key)
     * @throws InvalidRequestException your request has invalid parameters
     * @throws APIConnectionException failure to connect to Stripe's API
     * @throws APIException any other type of problem (for instance, a temporary issue with
     *         Stripe's servers)
     */
    @Nullable
    public Token createAccountTokenSynchronous(@NonNull final AccountParams accountParams)
            throws AuthenticationException, InvalidRequestException, APIConnectionException, APIException {
        // Delegate to the overload that takes an explicit publishable key.
        return createAccountTokenSynchronous(accountParams, mDefaultPublishableKey);
    }
}
public class TaskOperations {
    /**
     * Lists the {@link CloudTask tasks} of the specified job.
     *
     * @param jobId the ID of the job.
     * @param detailLevel a {@link DetailLevel} used for filtering the list and for
     *        controlling which properties are retrieved from the service.
     * @param additionalBehaviors a collection of {@link BatchClientBehavior} instances
     *        that are applied to the Batch service request.
     * @return A list of {@link CloudTask} objects.
     * @throws BatchErrorException when an error response is received from the Batch service.
     * @throws IOException when there is an error in serialization/deserialization of data
     *         sent to/received from the Batch service.
     */
    public PagedList<CloudTask> listTasks(String jobId, DetailLevel detailLevel, Iterable<BatchClientBehavior> additionalBehaviors) throws BatchErrorException, IOException {
        TaskListOptions options = new TaskListOptions();
        // Merge client-wide behaviors with the per-call ones, fold in the detail
        // level, then apply everything to the protocol-layer request options.
        BehaviorManager bhMgr = new BehaviorManager(this.customBehaviors(), additionalBehaviors);
        bhMgr.appendDetailLevelToPerCallBehaviors(detailLevel);
        bhMgr.applyRequestBehaviors(options);
        return this.parentBatchClient.protocolLayer().tasks().list(jobId, options);
    }
}
public class ParentRunnerSpy { /** * Reflectively invokes a { @ link ParentRunner } ' s getFilteredChildren method . Manipulating this
* list lets us control which tests will be run . */
static < T > List < T > getFilteredChildren ( ParentRunner < T > parentRunner ) { } } | try { // noinspection unchecked
return new ArrayList < > ( ( Collection < T > ) getFilteredChildrenMethod . invoke ( parentRunner ) ) ; } catch ( IllegalAccessException | InvocationTargetException e ) { throw new RuntimeException ( "Failed to invoke getFilteredChildren()" , e ) ; } |
public class BinaryJedis {
    /**
     * Decrement the number stored at key by one. If the key does not exist or contains a value of a
     * wrong type, set the key to the value of "0" before performing the decrement operation.
     * INCR/DECR commands are limited to 64 bit signed integers.
     * Note: this is actually a string operation; the string stored at the key is parsed as a
     * base 10 64 bit signed integer, decremented, and converted back to a string.
     * Time complexity: O(1)
     *
     * @see #incr(byte[])
     * @see #incrBy(byte[], long)
     * @see #decrBy(byte[], long)
     * @param key the key holding the counter
     * @return Integer reply: the new value of key after the decrement.
     */
    @Override
    public Long decr(final byte[] key) {
        // A plain DECR is not allowed while a MULTI/pipeline is open on this connection.
        checkIsInMultiOrPipeline();
        client.decr(key);
        return client.getIntegerReply();
    }
}
public class TransactionIdManager { /** * Generate a unique id that contains a timestamp , a counter
* and a siteid packed into a 64 - bit long value . Subsequent calls
* to this method will return strictly larger long values .
* @ return The newly generated transaction id . */
public long getNextUniqueTransactionId ( ) { } } | // get the current time , usually the salt value is zero
// in testing it is used to simulate clock skew
long currentTime = m_clock . get ( ) + m_timestampTestingSalt ; if ( currentTime == lastUsedTime ) { // increment the counter for this millisecond
counterValue ++ ; // handle the case where we ' ve run out of counter values
// for this particular millisecond ( feels unlikely )
if ( counterValue > COUNTER_MAX_VALUE ) { // spin until the next millisecond
while ( currentTime == lastUsedTime ) { currentTime = m_clock . get ( ) ; } // reset the counter and lastUsedTime for the new millisecond
lastUsedTime = currentTime ; counterValue = 0 ; } } else { // reset the counter and lastUsedTime for the new millisecond
if ( currentTime < lastUsedTime ) { VoltLogger log = new VoltLogger ( "HOST" ) ; double diffSeconds = ( lastUsedTime - currentTime ) / 1000.0 ; String msg = String . format ( "Initiator time moved backwards from: %d to %d, a difference of %.2f seconds." , lastUsedTime , currentTime , diffSeconds ) ; log . error ( msg ) ; System . err . println ( msg ) ; // if the diff is less than some specified amount of time , wait a bit
if ( ( lastUsedTime - currentTime ) < BACKWARD_TIME_FORGIVENESS_WINDOW_MS ) { log . info ( "This node will delay any stored procedures sent to it." ) ; log . info ( String . format ( "This node will resume full operation in %.2f seconds." , diffSeconds ) ) ; long count = BACKWARD_TIME_FORGIVENESS_WINDOW_MS ; // note , the loop should stop once lastUsedTime is PASSED , not current
while ( ( currentTime <= lastUsedTime ) && ( count -- > 0 ) ) { try { m_clock . sleep ( 1 ) ; } catch ( InterruptedException e ) { } currentTime = m_clock . get ( ) ; } // if the loop above ended because it ran too much
if ( count < 0 ) { org . voltdb . VoltDB . crashLocalVoltDB ( "VoltDB was unable to recover after the system time was externally negatively adusted. " + "It is possible that there is a serious system time or NTP error. " , false , null ) ; } } // crash immediately if time has gone backwards by too much
else { org . voltdb . VoltDB . crashLocalVoltDB ( String . format ( "%.2f is larger than the max allowable number of seconds that " + "the clock can be negatively adjusted (%d)" , diffSeconds , BACKWARD_TIME_FORGIVENESS_WINDOW_MS / 1000 ) , false , null ) ; } } lastUsedTime = currentTime ; counterValue = 0 ; } lastTxnId = makeIdFromComponents ( currentTime , counterValue , initiatorId ) ; return lastTxnId ; |
public class ClassPathGeneratorHelper { /** * Checks if the entry is a direct child of the root Entry
* isDirectChildPath ( ' / a / b / c / ' , ' / a / b / c / d . txt ' ) = > true isDirectChildPath (
* ' / a / b / c / ' , ' / a / b / c / d / ' ) = > true isDirectChildPath ( ' / a / b / c / ' ,
* ' / a / b / c / d / e . txt ' ) = > false
* @ param rootEntryPath
* the root entry path
* @ param entryPath
* the entry path to check
* @ return true if the entry is a direct child of the root Entry */
private boolean isDirectChildPath ( String rootEntryPath , String entryPath ) { } } | boolean result = false ; if ( entryPath . length ( ) > rootEntryPath . length ( ) && entryPath . startsWith ( rootEntryPath ) ) { int idx = entryPath . indexOf ( URL_SEPARATOR , rootEntryPath . length ( ) ) ; if ( idx == - 1 ) { // case where the entry is a child file
// / a / b / c / d . txt
result = true ; } else { if ( entryPath . length ( ) == idx + 1 ) { // case where the entry is
// a child file
// / a / b / c / d /
result = true ; } } } return result ; |
public class AbstractBitOutput {
    /**
     * Writes an unsigned value whose size is {@value Short#SIZE} bits in maximum.
     *
     * @param size the number of lower bits to write; between {@code 1} and
     *        {@value Short#SIZE}, both inclusive.
     * @param value the value to write
     * @throws IOException if an I/O error occurs
     */
    protected void unsigned16(final int size, final int value) throws IOException {
        requireValidSizeUnsigned16(size);
        // Split the requested width into whole bytes plus a leading partial byte.
        final int quotient = size / Byte.SIZE;
        final int remainder = size % Byte.SIZE;
        if (remainder > 0) {
            // Emit the high-order partial byte first (most significant bits first).
            unsigned8(remainder, value >> (quotient * Byte.SIZE));
        }
        // Then emit the remaining full bytes from most to least significant.
        for (int i = quotient - 1; i >= 0; i--) {
            unsigned8(Byte.SIZE, value >> (Byte.SIZE * i));
        }
    }
}
public class GcmRegistration {
    /**
     * Ensures the client has a GCM registration id: uses the locally cached id when
     * one exists, otherwise starts a background registration. If no valid Google
     * Play Services APK is found, raises an OnException event instead.
     *
     * @param ortcClient the client whose registration id is resolved
     */
    protected static void getRegistrationId(OrtcClient ortcClient) {
        if (checkPlayServices(ortcClient)) {
            gcm = GoogleCloudMessaging.getInstance(ortcClient.appContext);
            if (ortcClient.registrationId.isEmpty()) {
                // Try the id cached on the device first; only register when none is stored.
                String regid = getRegistrationId(ortcClient.appContext);
                ortcClient.registrationId = regid;
                if (regid.isEmpty()) {
                    registerInBackground(ortcClient);
                }
            }
        } else {
            ortcClient.raiseOrtcEvent(EventEnum.OnException, ortcClient, new OrtcGcmException("No valid Google Play Services APK found."));
        }
    }
}
public class RBBIRuleBuilder {
    /**
     * Compiles a rule string into binary RBBI data and writes it to the given
     * stream. The output layout is the same as the ICU4C runtime format.
     *
     * @param rules the break-iterator rule source text
     * @param os destination stream for the flattened, compiled data
     * @throws IOException if writing the output fails
     */
    static void compileRules(String rules, OutputStream os) throws IOException {
        // Read the input rules, generate a parse tree, symbol table,
        // and list of all Unicode Sets referenced by the rules.
        RBBIRuleBuilder builder = new RBBIRuleBuilder(rules);
        builder.fScanner.parse();
        // UnicodeSet processing.
        // Munge the Unicode Sets to create a set of character categories.
        // Generate the mapping tables (TRIE) from input 32-bit characters to
        // the character categories.
        builder.fSetBuilder.build();
        // Generate the DFA state transition tables: forward, reverse, and the
        // two "safe" variants.
        builder.fForwardTables = new RBBITableBuilder(builder, fForwardTree);
        builder.fReverseTables = new RBBITableBuilder(builder, fReverseTree);
        builder.fSafeFwdTables = new RBBITableBuilder(builder, fSafeFwdTree);
        builder.fSafeRevTables = new RBBITableBuilder(builder, fSafeRevTree);
        builder.fForwardTables.build();
        builder.fReverseTables.build();
        builder.fSafeFwdTables.build();
        builder.fSafeRevTables.build();
        // Optional debug dump, enabled via the debug environment string.
        if (builder.fDebugEnv != null && builder.fDebugEnv.indexOf("states") >= 0) {
            builder.fForwardTables.printRuleStatusTable();
        }
        // Package up the compiled data, writing it to the output stream
        // in the serialization format.
        builder.flattenData(os);
    }
}
public class HString { /** * Gets this HString as an annotation . If the HString is already an annotation it is simply cast . Otherwise a
* detached annotation of type < code > AnnotationType . ROOT < / code > is created .
* @ return An annotation . */
public Annotation asAnnotation ( ) { } } | if ( this instanceof Annotation ) { return Cast . as ( this ) ; } else if ( document ( ) != null ) { return document ( ) . annotationBuilder ( ) . type ( AnnotationType . ROOT ) . bounds ( this ) . attributes ( this ) . createDetached ( ) ; } return Fragments . detachedAnnotation ( AnnotationType . ROOT , start ( ) , end ( ) ) ; |
public class ProxyImpl { /** * In sequential proxying get some untried branch and start it , then wait for response and repeat */
public void startNextUntriedBranch ( ) { } } | if ( this . parallel ) throw new IllegalStateException ( "This method is only for sequantial proxying" ) ; for ( final MobicentsProxyBranch pbi : this . proxyBranches . values ( ) ) { // Issue http : / / code . google . com / p / mobicents / issues / detail ? id = 2461
// don ' t start the branch is it has been cancelled already
if ( ! pbi . isStarted ( ) && ! pbi . isCanceled ( ) ) { pbi . start ( ) ; return ; } } |
public class GenerationMojo {
    /**
     * Load an optional model from the project resources.
     *
     * @param clzz the model class to deserialize into
     * @param location the resource location, resolved relative to the project resources
     * @return Model, or empty optional if not present.
     */
    private <T> Optional<T> loadOptionalModel(Class<T> clzz, String location) {
        return ModelLoaderUtils.loadOptionalModel(clzz, getResourceLocation(location));
    }
}
public class CommerceDiscountUserSegmentRelLocalServiceBaseImpl {
    /**
     * Adds the commerce discount user segment rel to the database. Also notifies the
     * appropriate model listeners.
     *
     * @param commerceDiscountUserSegmentRel the commerce discount user segment rel
     * @return the commerce discount user segment rel that was added
     */
    @Indexable(type = IndexableType.REINDEX)
    @Override
    public CommerceDiscountUserSegmentRel addCommerceDiscountUserSegmentRel(CommerceDiscountUserSegmentRel commerceDiscountUserSegmentRel) {
        // Mark as new so the persistence layer performs an insert rather than an update.
        commerceDiscountUserSegmentRel.setNew(true);
        return commerceDiscountUserSegmentRelPersistence.update(commerceDiscountUserSegmentRel);
    }
}
public class SerializationUtilities { /** * Serialize an object to disk .
* @ param file
* the file to write to .
* @ param obj
* the object to write .
* @ throws IOException */
public static void serializeToDisk ( File file , Object obj ) throws IOException { } } | byte [ ] serializedObj = serialize ( obj ) ; try ( RandomAccessFile raFile = new RandomAccessFile ( file , "rw" ) ) { raFile . write ( serializedObj ) ; } |
public class CreateInsightRequestMarshaller {
    /**
     * Marshalls the given request's fields (name, filters, group-by attribute)
     * into the protocol marshaller.
     *
     * @param createInsightRequest the request to marshall; must not be null
     * @param protocolMarshaller the target protocol marshaller
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(CreateInsightRequest createInsightRequest, ProtocolMarshaller protocolMarshaller) {
        if (createInsightRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(createInsightRequest.getName(), NAME_BINDING);
            protocolMarshaller.marshall(createInsightRequest.getFilters(), FILTERS_BINDING);
            protocolMarshaller.marshall(createInsightRequest.getGroupByAttribute(), GROUPBYATTRIBUTE_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ServletInvocationEvent { /** * Get the request used for the servlet invocation . */
public HttpServletRequest getRequest ( ) { } } | // moved as part of LIDB - 3598 to ServletUtil
/* ServletRequest r = _ req ;
while ( ! ( r instanceof HttpServletRequest ) )
if ( r instanceof ServletRequestWrapper )
r = ( ( ServletRequestWrapper ) r ) . getRequest ( ) ;
return ( HttpServletRequest ) r ; */
// begin 311003 , 61FVT : Simple SIP request generating exception
ServletRequest sReq = null ; if ( _req == null ) return null ; try { sReq = ServletUtil . unwrapRequest ( _req ) ; } catch ( RuntimeException re ) { if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINE ) ) logger . logp ( Level . FINE , CLASS_NAME , "getRequest" , "Caught RuntimeException unwrapping the request" , re ) ; return null ; } // end 311003 , 61FVT : Simple SIP request generating exception
if ( sReq instanceof HttpServletRequest ) return ( HttpServletRequest ) sReq ; else return null ; |
public class DoubleStreamEx { /** * Returns the maximum element of this stream according to the provided key
* extractor function .
* This is a terminal operation .
* @ param keyExtractor a non - interfering , stateless function
* @ return an { @ code OptionalDouble } describing the first element of this
* stream for which the highest value was returned by key extractor ,
* or an empty { @ code OptionalDouble } if the stream is empty
* @ since 0.1.2 */
public OptionalDouble maxByInt ( DoubleToIntFunction keyExtractor ) { } } | return collect ( PrimitiveBox :: new , ( box , d ) -> { int key = keyExtractor . applyAsInt ( d ) ; if ( ! box . b || box . i < key ) { box . b = true ; box . i = key ; box . d = d ; } } , PrimitiveBox . MAX_INT ) . asDouble ( ) ; |
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public String convertIfcAnalysisTheoryTypeEnumToString ( EDataType eDataType , Object instanceValue ) { } } | return instanceValue == null ? null : instanceValue . toString ( ) ; |
public class MessageBatch { /** * Returns the total number of bytes of the message batch ( by calling { @ link org . jgroups . Message # getLength ( ) } on all messages ) */
public int length ( ) { } } | int retval = 0 ; for ( int i = 0 ; i < index ; i ++ ) retval += length_visitor . applyAsInt ( messages [ i ] , this ) ; return retval ; |
public class FileUtil { /** * Checks if the extension is valid . This method only permits letters , digits ,
* and an underscore character .
* @ param extension The file extension to validate
* @ return True if its valid , otherwise false */
public static boolean isValidFileExtension ( String extension ) { } } | for ( int i = 0 ; i < extension . length ( ) ; i ++ ) { char c = extension . charAt ( i ) ; if ( ! ( Character . isDigit ( c ) || Character . isLetter ( c ) || c == '_' ) ) { return false ; } } return true ; |
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public FNCPatAlign createFNCPatAlignFromString ( EDataType eDataType , String initialValue ) { } } | FNCPatAlign result = FNCPatAlign . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ; |
public class ByteBuffer {
    /**
     * Gets a copy of the current buffer as byte array, but the new byte[]
     * has the specified capacity. Useful if you need to store additional bytes
     * in the returned byte[] and don't want to do an additional System.arraycopy()
     * afterwards. Allocates memory for the copy and returns it.
     *
     * @param offset The offset to start from
     * @param length The length from the offset
     * @param capacity The size of the new byte[]. Must be >= {@code length}
     * @return A byte array. Could be empty.
     * @throws IllegalArgumentException If capacity isn't large enough
     *         to hold the copied range
     */
    public byte[] toArray(int offset, int length, int capacity) {
        // Validate that offset/length describe a range inside this buffer.
        ByteBuffer.checkOffsetLength(size(), offset, length);
        // Will the requested capacity hold the copied range?
        if (capacity < length) {
            throw new IllegalArgumentException("Capacity must be large enough to hold a byte[] of at least a size=" + length);
        }
        byte[] arrayCopy = new byte[capacity];
        // Copy the range into the front of the (possibly larger) array.
        this.toArray(offset, length, arrayCopy, 0);
        return arrayCopy;
    }
}
public class DefaultGroovyMethods { /** * Returns a < code > BufferedIterator < / code > that allows examining the next element without
* consuming it .
* < pre class = " groovyTestCase " >
* assert [ 1 , 2 , 3 , 4 ] . iterator ( ) . buffered ( ) . with { [ head ( ) , toList ( ) ] } = = [ 1 , [ 1 , 2 , 3 , 4 ] ]
* < / pre >
* @ param self an iterator object
* @ return a BufferedIterator wrapping self
* @ since 2.5.0 */
public static < T > BufferedIterator < T > buffered ( Iterator < T > self ) { } } | if ( self instanceof BufferedIterator ) { return ( BufferedIterator < T > ) self ; } else { return new IteratorBufferedIterator < T > ( self ) ; } |
public class DepAnn {
    /**
     * Reports a dep-ann finding for a declaration when (1) its Javadoc contains the
     * deprecated javadoc tag but (2) the declaration is not annotated with
     * {@link java.lang.Deprecated}. The suggested fix prefixes the declaration
     * with the annotation.
     */
    @SuppressWarnings("javadoc")
    private Description checkDeprecatedAnnotation(Tree tree, VisitorState state) {
        Symbol symbol = ASTHelpers.getSymbol(tree);
        // javac sets the DEPRECATED bit in flags if the Javadoc contains @deprecated.
        if ((symbol.flags() & DEPRECATED) == 0) {
            return Description.NO_MATCH;
        }
        // Already carries the @Deprecated annotation: nothing to report.
        if (symbol.attribute(state.getSymtab().deprecatedType.tsym) != null) {
            return Description.NO_MATCH;
        }
        return describeMatch(tree, SuggestedFix.prefixWith(tree, "@Deprecated\n"));
    }
}
public class CmsFlexCache {
    /**
     * Clears all entries in the cache, online or offline.<p>
     * The keys are not cleared; only their variation maps are emptied.<p>
     * Only users with administrator permissions are allowed
     * to perform this operation.<p>
     */
    private synchronized void clearEntries() {
        if (!isEnabled()) {
            return;
        }
        if (LOG.isInfoEnabled()) {
            LOG.info(Messages.get().getBundle().key(Messages.LOG_FLEXCACHE_CLEAR_ALL_0));
        }
        // Iterate over a copied key set to avoid ConcurrentModificationExceptions.
        Set<String> cacheKeys = synchronizedCopyKeys(m_keyCache);
        Iterator<String> i = cacheKeys.iterator();
        while (i.hasNext()) {
            CmsFlexCacheVariation v = m_keyCache.get(i.next());
            // Detach every variation entry from both this map and the LRU cache.
            Iterator<I_CmsLruCacheObject> allEntries = v.m_map.values().iterator();
            while (allEntries.hasNext()) {
                I_CmsLruCacheObject nextObject = allEntries.next();
                allEntries.remove();
                m_variationCache.remove(nextObject);
            }
            // Replace the variation map with a fresh, empty one.
            v.m_map = new Hashtable<String, I_CmsLruCacheObject>(INITIAL_CAPACITY_VARIATIONS);
        }
        m_size = 0;
    }
}
public class ULocale { /** * Append a tag to a StringBuilder , adding the separator if necessary . The tag must
* not be a zero - length string .
* @ param tag The tag to add .
* @ param buffer The output buffer . */
private static void appendTag ( String tag , StringBuilder buffer ) { } } | if ( buffer . length ( ) != 0 ) { buffer . append ( UNDERSCORE ) ; } buffer . append ( tag ) ; |
public class Item {
    /**
     * Sets the item to a BootstrapMethod item.
     *
     * @param position position in bytes in the class attribute BootstrapMethods.
     * @param hashCode hashcode of the item, computed from the hashcode of the
     *        bootstrap method and the hashcodes of all bootstrap arguments.
     */
    void set(int position, int hashCode) {
        this.type = ClassWriter.BSM;
        // intVal carries the byte offset into the BootstrapMethods attribute.
        this.intVal = position;
        this.hashCode = hashCode;
    }
}
public class DashPackageMarshaller {
    /**
     * Marshalls every field of the given {@link DashPackage} into the protocol
     * marshaller.
     *
     * @param dashPackage the package to marshall; must not be null
     * @param protocolMarshaller the target protocol marshaller
     * @throws SdkClientException if the package is null or marshalling fails
     */
    public void marshall(DashPackage dashPackage, ProtocolMarshaller protocolMarshaller) {
        if (dashPackage == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(dashPackage.getEncryption(), ENCRYPTION_BINDING);
            protocolMarshaller.marshall(dashPackage.getManifestLayout(), MANIFESTLAYOUT_BINDING);
            protocolMarshaller.marshall(dashPackage.getManifestWindowSeconds(), MANIFESTWINDOWSECONDS_BINDING);
            protocolMarshaller.marshall(dashPackage.getMinBufferTimeSeconds(), MINBUFFERTIMESECONDS_BINDING);
            protocolMarshaller.marshall(dashPackage.getMinUpdatePeriodSeconds(), MINUPDATEPERIODSECONDS_BINDING);
            protocolMarshaller.marshall(dashPackage.getPeriodTriggers(), PERIODTRIGGERS_BINDING);
            protocolMarshaller.marshall(dashPackage.getProfile(), PROFILE_BINDING);
            protocolMarshaller.marshall(dashPackage.getSegmentDurationSeconds(), SEGMENTDURATIONSECONDS_BINDING);
            protocolMarshaller.marshall(dashPackage.getSegmentTemplateFormat(), SEGMENTTEMPLATEFORMAT_BINDING);
            protocolMarshaller.marshall(dashPackage.getStreamSelection(), STREAMSELECTION_BINDING);
            protocolMarshaller.marshall(dashPackage.getSuggestedPresentationDelaySeconds(), SUGGESTEDPRESENTATIONDELAYSECONDS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class MG2Encoder {
    /**
     * Convert the normals to a new coordinate system: magnitude, phi, theta
     * (relative to predicted smooth normals), quantized to integers.
     *
     * @param vertices vertex data
     * @param normals normal data
     * @param indices model indices
     * @param sortVertices sorted vertices
     * @return encoded normals, three ints per vertex: magnitude, phi, theta
     */
    private int[] makeNormalDeltas(float[] vertices, float[] normals, int[] indices, SortableVertex[] sortVertices) {
        // Calculate smooth normals (Note: aVertices and aIndices use the sorted
        // index space, so smoothNormals will too).
        float[] smoothNormals = calcSmoothNormals(vertices, indices);
        // Normal scaling factor.
        float scale = 1.0f / normalPrecision;
        int vc = vertices.length / CTM_POSITION_ELEMENT_COUNT;
        int[] intNormals = new int[vc * CTM_NORMAL_ELEMENT_COUNT];
        for (int i = 0; i < vc; ++i) {
            // Get old normal index (before vertex sorting).
            int oldIdx = sortVertices[i].originalIndex * Mesh.CTM_NORMAL_ELEMENT_COUNT;
            int newIdx = i * Mesh.CTM_NORMAL_ELEMENT_COUNT;
            // Calculate normal magnitude (should always be 1.0 for unit length normals).
            float magn = (float) sqrt(normals[oldIdx] * normals[oldIdx] + normals[oldIdx + 1] * normals[oldIdx + 1] + normals[oldIdx + 2] * normals[oldIdx + 2]);
            if (magn < 1e-10f) {
                magn = 1.0f;
            }
            // Invert magnitude if the normal is negative compared to the predicted
            // smooth normal.
            if ((smoothNormals[newIdx] * normals[oldIdx] + smoothNormals[newIdx + 1] * normals[oldIdx + 1] + smoothNormals[newIdx + 2] * normals[oldIdx + 2]) < 0.0f) {
                magn = -magn;
            }
            // Store the quantized magnitude in the first of the three normal elements.
            intNormals[newIdx] = (int) floor(scale * magn + 0.5f);
            // Normalize the normal (1/magn) - and flip it if magn < 0.
            magn = 1.0f / magn;
            float[] n = new float[3];
            for (int j = 0; j < 3; ++j) {
                n[j] = normals[oldIdx + j] * magn;
            }
            // Convert the normal to angular representation (phi, theta) in a coordinate
            // system where the nominal (smooth) normal is the Z-axis.
            float[] basisAxes = makeNormalCoordSys(smoothNormals, newIdx);
            float[] n2 = new float[3];
            for (int j = 0; j < 3; ++j) {
                int id = j * Mesh.CTM_NORMAL_ELEMENT_COUNT;
                n2[j] = basisAxes[id] * n[0] + basisAxes[id + 1] * n[1] + basisAxes[id + 2] * n[2];
            }
            double phi, theta, thetaScale;
            // Clamp acos input: numeric noise can push n2[2] slightly above 1.
            if (n2[2] >= 1.0f) {
                phi = 0.0f;
            } else {
                phi = acos(n2[2]);
            }
            theta = atan2(n2[1], n2[0]);
            // Round phi and theta (spherical coordinates) to integers. Note: We let the
            // theta resolution vary with the x/y circumference (roughly phi).
            int intPhi = (int) floor(phi * (scale / (0.5 * PI)) + 0.5);
            if (intPhi == 0) {
                thetaScale = 0.0;
            } else if (intPhi <= 4) {
                thetaScale = 2.0 / PI;
            } else {
                thetaScale = intPhi / (2.0 * PI);
            }
            intNormals[newIdx + 1] = intPhi;
            intNormals[newIdx + 2] = (int) floor((theta + PI) * thetaScale + 0.5f);
        }
        return intNormals;
    }
}
public class Jdt2Ecore { /** * Replies if the given method is marked has automatically generated by the SARL compiler .
* @ param method the method to check .
* @ return < code > true < / code > if the method is annoted with SyntheticMember ; < code > false < / code >
* otherwise . */
public boolean isGeneratedOperation ( IMethod method ) { } } | return getAnnotation ( method , SyntheticMember . class . getName ( ) ) != null || getAnnotation ( method , Generated . class . getName ( ) ) != null ; |
public class ModelReflector {
    /**
     * Find extension points of the form prefix.<i>modelClass.getSimpleName()</i>.suffix
     * for all relevant models in the right sequence.
     *
     * @param prefix the extension point prefix
     * @param suffix the extension point suffix
     * @return the cached list of extension point names for this prefix/suffix pair
     */
    public List<String> getExtensionPoints(String prefix, String suffix) {
        // Results are memoized in extensionPointsCache under a "prefix@suffix" key.
        return extensionPointsCache.get(prefix + "@" + suffix);
    }
}
public class AbstractSqlBuilder {
    /**
     * Sets a float-typed statement parameter.
     *
     * @param fieldName the parameter (field) name
     * @param value the parameter value; must not be null
     * @throws IllegalArgumentException if {@code value} is null
     */
    public void setFloat(String fieldName, Float value) {
        if (value == null) {
            // Message text intentionally left in the project's original locale.
            throw new IllegalArgumentException("参数值[" + fieldName + "]不能为NULL.");
        }
        fieldList.add(fieldName);
        statementParameter.setFloat(value);
    }
}
public class XMLChar { /** * Check to see if a string is a valid NCName according to [ 4]
* from the XML Namespaces 1.0 Recommendation
* @ param ncName string to check
* @ return true if name is a valid NCName */
public static boolean isValidNCName ( String ncName ) { } } | final int length = ncName . length ( ) ; if ( length == 0 ) { return false ; } char ch = ncName . charAt ( 0 ) ; if ( ! isNCNameStart ( ch ) ) { return false ; } for ( int i = 1 ; i < length ; ++ i ) { ch = ncName . charAt ( i ) ; if ( ! isNCName ( ch ) ) { return false ; } } return true ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.