signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class PersistentPropertyStorage { /** * Factory method for PersistentPropertyStorage . Guarantees that only a
* single PersistentPropertyStorage object exists for any property file .
* @ param propertyFile the name of the property file to use
* @ throws IOException if < code > propertyFile < / code > cannot be converted into
* a canonical path name ( via { @ link File # getCanonicalPath ( ) } ) . */
synchronized static PersistentPropertyStorage newPersistentPropertyStorage ( String propertyFile ) throws IOException { } } | File file = new File ( propertyFile ) ; String canonicalName = file . getCanonicalPath ( ) ; if ( propertyFileMap . containsKey ( canonicalName ) ) { return ( PersistentPropertyStorage ) propertyFileMap . get ( canonicalName ) ; } PersistentPropertyStorage storage = new PersistentPropertyStorage ( file ) ; propertyFileMap . put ( canonicalName , storage ) ; return storage ; |
public class OpenBitSet { /** * clears a bit .
* The index should be less than the OpenBitSet size . */
public void fastClear ( int index ) { } } | assert index >= 0 && index < numBits ; int wordNum = index >> 6 ; int bit = index & 0x03f ; long bitmask = 1L << bit ; bits [ wordNum ] &= ~ bitmask ; // hmmm , it takes one more instruction to clear than it does to set . . . any
// way to work around this ? If there were only 63 bits per word , we could
// use a right shift of 1011111 . . . 111 in binary to position the 0 in the
// correct place ( using sign extension ) .
// Could also use Long . rotateRight ( ) or rotateLeft ( ) * if * they were converted
// by the JVM into a native instruction .
// bits [ word ] & = Long . rotateLeft ( 0xfffffe , bit ) ; |
public class IfcStructuralItemImpl {
    /**
     * Returns the structural activities assigned to this item
     * (EMF reflective accessor with resolve semantics).
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    public EList<IfcRelConnectsStructuralActivity> getAssignedStructuralActivity() {
        return (EList<IfcRelConnectsStructuralActivity>) eGet(Ifc2x3tc1Package.Literals.IFC_STRUCTURAL_ITEM__ASSIGNED_STRUCTURAL_ACTIVITY, true);
    }
}
public class RunMatchFilter { /** * TODO dogpile days check override @ Override */
public void write ( DataOutput out ) throws IOException { } } | out . writeInt ( appId . length ) ; out . write ( appId ) ; out . writeInt ( maxCount ) ; |
public class ObjectSchema { /** * Adds a validation schema for a required object property .
* @ param name a property name .
* @ param type ( optional ) a property schema or type .
* @ param rules ( optional ) a list of property validation rules .
* @ return the validation schema */
public ObjectSchema withRequiredProperty ( String name , Object type , IValidationRule ... rules ) { } } | _properties = _properties != null ? _properties : new ArrayList < PropertySchema > ( ) ; PropertySchema schema = new PropertySchema ( name , type ) ; schema . setRules ( Arrays . asList ( rules ) ) ; schema . makeRequired ( ) ; return withProperty ( schema ) ; |
public class SystemApi {
    /**
     * Get analytics config; returns the existing configuration.
     *
     * @return ApiResponse&lt;GetConfigResponse&gt; including HTTP status and headers
     * @throws ApiException if the API call fails, e.g. server error or the
     *         response body cannot be deserialized
     */
    public ApiResponse<GetConfigResponse> getConfigWithHttpInfo() throws ApiException {
        // No progress listeners are needed for this synchronous variant.
        com.squareup.okhttp.Call call = getConfigValidateBeforeCall(null, null);
        Type localVarReturnType = new TypeToken<GetConfigResponse>() {}.getType();
        return apiClient.execute(call, localVarReturnType);
    }
}
public class PreparedStatement {
    /**
     * Creates a BLOB from an input stream.
     *
     * @param stream input stream whose bytes populate the BLOB
     * @param length number of bytes to read from the stream
     * @return the populated acolyte BLOB
     * @throws SQLException if the BLOB content cannot be set
     */
    private acolyte.jdbc.Blob createBlob(InputStream stream, long length) throws SQLException {
        final acolyte.jdbc.Blob blob = acolyte.jdbc.Blob.Nil();
        // NOTE(review): offset 0 is used although java.sql.Blob#setBytes is specified
        // as 1-based — presumably acolyte's Nil() blob accepts 0; confirm against acolyte docs.
        blob.setBytes(0L, createBytes(stream, length));
        return blob;
    }
}
public class AWSMobileClient {
    /**
     * Exports project configuration to a snapshot which can be downloaded and shared.
     * Note that mobile app push credentials are encrypted in exported projects, so they
     * can only be shared successfully within the same AWS account.
     *
     * @param request request structure used in requests to export project configuration details
     * @return result of the ExportProject operation returned by the service
     * @throws InternalFailureException if the service hit an unexpected error condition
     * @throws ServiceUnavailableException if the service is temporarily unavailable; retry after a delay
     * @throws UnauthorizedException if the caller's credentials are insufficient to authorize the request
     * @throws TooManyRequestsException if too many requests were received for this account; retry after a delay
     * @throws BadRequestException if a parameter is invalid or the project state prevents the operation
     * @throws NotFoundException if no entity can be found with the specified identifier
     * @sample AWSMobile.ExportProject
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mobile-2017-07-01/ExportProject" target="_top">AWS API Documentation</a>
     */
    @Override
    public ExportProjectResult exportProject(ExportProjectRequest request) {
        // Apply client-side handlers/customizations before dispatching to the service.
        request = beforeClientExecution(request);
        return executeExportProject(request);
    }
}
public class LambdaDslJsonArray {
    /**
     * Element that must match the given timestamp format.
     *
     * @param format timestamp format
     * @param example example date and time to use for generated bodies
     * @return this array DSL, for call chaining
     */
    public LambdaDslJsonArray timestamp(final String format, final Instant example) {
        // Delegate to the underlying pact array builder.
        pactArray.timestamp(format, example);
        return this;
    }
}
public class ModelsImpl {
    /**
     * Adds an intent classifier to the application.
     *
     * @param appId the application ID
     * @param versionId the version ID
     * @param addIntentOptionalParameter optional parameters to set before calling this API
     * @throws IllegalArgumentException if parameters fail the validation
     * @return the observable to the UUID object of the created intent
     */
    public Observable<UUID> addIntentAsync(UUID appId, String versionId, AddIntentOptionalParameter addIntentOptionalParameter) {
        // Unwrap the ServiceResponse so subscribers receive only the body (the UUID).
        return addIntentWithServiceResponseAsync(appId, versionId, addIntentOptionalParameter).map(new Func1<ServiceResponse<UUID>, UUID>() {
            @Override
            public UUID call(ServiceResponse<UUID> response) {
                return response.body();
            }
        });
    }
}
public class MessageEndpointHandlerPool {
    /**
     * Discards a pooled message endpoint (LI2110.56 - added entire method).
     * Notifies the handler factory of the discard, then discards the endpoint itself.
     *
     * @param o the pooled object; expected to be a {@code MessageEndpointBase}
     */
    @Override
    public void discard(Object o) {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
        if (isTraceOn && tc.isEntryEnabled())
            Tr.entry(tc, "MEF.discard");
        ivMessageEnpointHandlerFactory.discard();
        MessageEndpointBase meh = (MessageEndpointBase) o;
        MessageEndpointBase.discard(meh);
        if (isTraceOn && tc.isEntryEnabled())
            Tr.exit(tc, "MEF.discard");
    }
}
public class RubyHelper { /** * Calls " require ' requirement ' " in the Ruby provided .
* @ return boolean If successful , returns true , otherwise false . */
public static boolean requireIfAvailable ( Ruby ruby , String requirement , boolean logErrors ) { } } | boolean success = false ; try { StringBuilder script = new StringBuilder ( ) ; script . append ( "require %q(" ) ; script . append ( requirement ) ; script . append ( ")\n" ) ; evalScriptlet ( ruby , script . toString ( ) , false ) ; success = true ; } catch ( Throwable t ) { success = false ; if ( logErrors ) { log . debug ( "Error encountered. Unable to require file: " + requirement , t ) ; } } return success ; |
public class JointFragments { /** * Stores the alignment between the residues of several fragments .
* Each int [ ] stores the residue numbers of several equivalent residues . */
public void setIdxlist ( List < int [ ] > idxs ) { } } | Iterator < int [ ] > iter = idxs . iterator ( ) ; while ( iter . hasNext ( ) ) { int [ ] e = iter . next ( ) ; idxlist . add ( e ) ; } |
public class OrientModelGraphUtils { /** * Sets the value for the active field of a graph object . */
public static void setActiveFieldValue ( ODocument document , Boolean active ) { } } | setFieldValue ( document , ACTIVE_FIELD , active . toString ( ) ) ; |
public class MessagePattern { /** * Returns the substring of the pattern string indicated by the Part .
* Convenience method for getPatternString ( ) . substring ( part . getIndex ( ) , part . getLimit ( ) ) .
* @ param part a part of this MessagePattern .
* @ return the substring associated with part . */
public String getSubstring ( Part part ) { } } | int index = part . index ; return msg . substring ( index , index + part . length ) ; |
public class NeuralNetworkParser {
    /**
     * Extracts word-cluster features into the feature vector.
     *
     * @param ctx parsing context (stack positions S0..S2, buffer positions N0..N2, and children)
     * @param cluster4 4-bit cluster ids per word
     * @param cluster6 6-bit cluster ids per word
     * @param cluster full cluster ids per word
     * @param features output feature list (appended to; untouched when clustering is disabled)
     */
    void get_cluster_features(final Context ctx, final List<Integer> cluster4, final List<Integer> cluster6, final List<Integer> cluster, List<Integer> features) {
        if (!use_cluster) {
            return;
        }
        // S0 and N0 contribute all three cluster granularities; every other
        // position contributes only the full cluster id.
        PUSH(features, CLUSTER(cluster, ctx.S0));
        PUSH(features, CLUSTER4(cluster4, ctx.S0));
        PUSH(features, CLUSTER6(cluster6, ctx.S0));
        PUSH(features, CLUSTER(cluster, ctx.S1));
        PUSH(features, CLUSTER(cluster, ctx.S2));
        PUSH(features, CLUSTER(cluster, ctx.N0));
        PUSH(features, CLUSTER4(cluster4, ctx.N0));
        PUSH(features, CLUSTER6(cluster6, ctx.N0));
        PUSH(features, CLUSTER(cluster, ctx.N1));
        PUSH(features, CLUSTER(cluster, ctx.N2));
        PUSH(features, CLUSTER(cluster, ctx.S0L));
        PUSH(features, CLUSTER(cluster, ctx.S0R));
        PUSH(features, CLUSTER(cluster, ctx.S0L2));
        PUSH(features, CLUSTER(cluster, ctx.S0R2));
        PUSH(features, CLUSTER(cluster, ctx.S0LL));
        PUSH(features, CLUSTER(cluster, ctx.S0RR));
        PUSH(features, CLUSTER(cluster, ctx.S1L));
        PUSH(features, CLUSTER(cluster, ctx.S1R));
        PUSH(features, CLUSTER(cluster, ctx.S1L2));
        PUSH(features, CLUSTER(cluster, ctx.S1R2));
        PUSH(features, CLUSTER(cluster, ctx.S1LL));
        PUSH(features, CLUSTER(cluster, ctx.S1RR));
    }
}
public class KernelFeatureDefinitionImpl {
    /**
     * Header lookup override: when this definition is the system-bundle provider,
     * the IBM API/SPI package headers are synthesized by merging package definitions;
     * every other header is delegated to the superclass.
     *
     * @see com.ibm.ws.kernel.feature.internal.subsystem.SubsystemFeatureDefinitionImpl#getHeader(java.lang.String)
     */
    @Override
    public String getHeader(String header) {
        if (isSystemBundleProvider) {
            if (FeatureDefinitionUtils.IBM_API_PACKAGE.equals(header))
                return generateHeaderFromDefinitions("IBM-MergedApiPackage", sysPkgApiHeader);
            if (FeatureDefinitionUtils.IBM_SPI_PACKAGE.equals(header))
                return generateHeaderFromDefinitions("IBM-MergedSpiPackage", sysPkgSpiHeader);
        }
        return super.getHeader(header);
    }
}
public class FlatRStarTree { /** * Returns true if in the specified node an underflow occurred , false
* otherwise .
* @ param node the node to be tested for underflow
* @ return true if in the specified node an underflow occurred , false
* otherwise */
@ Override protected boolean hasUnderflow ( FlatRStarTreeNode node ) { } } | if ( node . isLeaf ( ) ) { return node . getNumEntries ( ) < leafMinimum ; } else { return false ; } |
public class MOAClassOptionEditor { /** * Paints a representation of the current Object .
* @ param gfx the graphics context to use
* @ param box the area we are allowed to paint into */
public void paintValue ( Graphics gfx , Rectangle box ) { } } | FontMetrics fm ; int vpad ; String val ; fm = gfx . getFontMetrics ( ) ; vpad = ( box . height - fm . getHeight ( ) ) / 2 ; val = ( ( ClassOption ) getValue ( ) ) . getValueAsCLIString ( ) ; gfx . drawString ( val , 2 , fm . getHeight ( ) + vpad ) ; |
public class AbstractRegistry {
    /**
     * Activate all {@code DependencyConsistencyCheckTrigger} added by calling
     * {@link #registerDependency(Registry)}.
     *
     * @throws CouldNotPerformException if the activation fails
     * @throws InterruptedException if the activation is interrupted
     */
    public void activateDependencies() throws CouldNotPerformException, InterruptedException {
        // Read lock suffices: the map itself is only iterated; activation mutates the triggers.
        dependingRegistryMapLock.readLock().lock();
        try {
            for (final DependencyConsistencyCheckTrigger dependencyConsistencyCheckTrigger : dependingRegistryMap.values()) {
                dependencyConsistencyCheckTrigger.activate();
            }
        } finally {
            dependingRegistryMapLock.readLock().unlock();
        }
    }
}
public class Payload {
    /**
     * Add a custom dictionary with multiple values.
     *
     * @param name the dictionary key
     * @param values the values to store under the key (raw List — element type is unchecked)
     * @throws JSONException if the values cannot be stored in the payload
     */
    public void addCustomDictionary(String name, List values) throws JSONException {
        logger.debug("Adding custom Dictionary [" + name + "] = (list)");
        put(name, values, payload, false);
    }
}
public class InternalSARLParser {
    /**
     * $ANTLR start synpred38_InternalSARL — generated syntactic-predicate fragment
     * that matches two consecutive '<' tokens (token type 40); bails out on the
     * first failed match when backtracking.
     */
    public final void synpred38_InternalSARL_fragment() throws RecognitionException {
        // InternalSARL.g:12983:6: ( ( '<' '<' ) )
        // InternalSARL.g:12983:7: ( '<' '<' )
        {
            // InternalSARL.g:12983:7: ( '<' '<' )
            // InternalSARL.g:12984:7: '<' '<'
            {
                match(input, 40, FOLLOW_91);
                if (state.failed) return;
                match(input, 40, FOLLOW_2);
                if (state.failed) return;
            }
        }
    }
}
public class JBBPOut { /** * Write String chars trimmed to bytes , only the lower 8 bit will be saved per
* char code .
* @ param str a String which chars should be trimmed to bytes and saved
* @ param bitOrder the bit outOrder to save bytes
* @ return the DSL session
* @ throws IOException it will be thrown for transport errors
* @ since 1.1 */
public JBBPOut Byte ( final String str , final JBBPBitOrder bitOrder ) throws IOException { } } | assertNotEnded ( ) ; assertStringNotNull ( str ) ; if ( this . processCommands ) { for ( int i = 0 ; i < str . length ( ) ; i ++ ) { byte value = ( byte ) str . charAt ( i ) ; if ( bitOrder == JBBPBitOrder . MSB0 ) { value = JBBPUtils . reverseBitsInByte ( value ) ; } this . outStream . write ( value ) ; } } return this ; |
public class SimpleGroovyClassDoc { /** * returns a sorted array of constructors */
public GroovyConstructorDoc [ ] constructors ( ) { } } | Collections . sort ( constructors ) ; return constructors . toArray ( new GroovyConstructorDoc [ constructors . size ( ) ] ) ; |
public class SliceImpl {
    /**
     * Returns a new slice whose content is this slice's content mapped through the
     * converter; paging state (pageable, hasNext) is carried over unchanged.
     * (non-Javadoc)
     * @see org.springframework.data.domain.Slice#transform(org.springframework.core.convert.converter.Converter)
     */
    @Override
    public <U> Slice<U> map(final Function<? super T, ? extends U> converter) {
        return new SliceImpl<>(this.getConvertedContent(converter), this.pageable, this.hasNext);
    }
}
public class CollectionJsonItem {
    /**
     * Transform a domain object into a collection of {@link CollectionJsonData} objects to serialize properly.
     *
     * @return explicitly supplied data when present; a single value entry when the raw
     *         object is of a primitive wrapper type; otherwise one name/value entry per
     *         discovered property of the raw object
     */
    public List<CollectionJsonData> getData() {
        if (!this.data.isEmpty()) {
            // Explicitly supplied data takes precedence over reflective extraction.
            return this.data;
        }
        if (this.rawData != null && PRIMITIVE_TYPES.contains(this.rawData.getClass())) {
            return Collections.singletonList(new CollectionJsonData().withValue(this.rawData));
        }
        // Reflectively expand the raw object's properties into name/value pairs.
        // NOTE(review): rawData may still be null here if data is empty — presumably
        // findProperties tolerates null; confirm against PropertyUtils.
        return PropertyUtils.findProperties(this.rawData).entrySet().stream().map(entry -> new CollectionJsonData().withName(entry.getKey()).withValue(entry.getValue())).collect(Collectors.toList());
    }
}
public class ColumnText {
    /**
     * Checks if the element has a height of 0: true only in composite mode when the
     * first queued composite element is a YMARK.
     *
     * @return true or false
     * @since 2.1.2
     */
    public boolean zeroHeightElement() {
        return composite && !compositeElements.isEmpty() && ((Element) compositeElements.getFirst()).type() == Element.YMARK;
    }
}
public class SecurityServletConfiguratorHelper {
    /**
     * Determines if SSL is required. SSL is required if the transport guarantee is other than NONE.
     * Note that only one user-data-constraint element can be present per security-constraint; only
     * the first occurrence is processed. If multiple web fragments specify this element with
     * different values and it's absent from the web.xml, this results in an error that fails the
     * application install.
     *
     * @param archiveConstraint the security-constraint
     * @return false when transport-guarantee is NONE, otherwise true
     */
    private boolean isSSLRequired(com.ibm.ws.javaee.dd.web.common.SecurityConstraint archiveConstraint) {
        boolean sslRequired = false;
        UserDataConstraint dataConstraint = archiveConstraint.getUserDataConstraint();
        if (dataConstraint != null) {
            int transportGuarantee = dataConstraint.getTransportGuarantee();
            // The first web-resource-collection's name keys the recorded constraint.
            String webResourceName = archiveConstraint.getWebResourceCollections().get(0).getWebResourceName();
            Map<String, ConfigItem<String>> userDataConstraintMap = configurator.getConfigItemMap(USER_DATA_CONSTRAINT_KEY);
            ConfigItem<String> existingUserDataConstraint = userDataConstraintMap.get(webResourceName);
            if (existingUserDataConstraint == null) {
                // First occurrence for this resource: record it and decide from the guarantee.
                userDataConstraintMap.put(webResourceName, this.configurator.createConfigItem(String.valueOf(transportGuarantee)));
                if (transportGuarantee != UserDataConstraint.TRANSPORT_GUARANTEE_NONE) {
                    sslRequired = true;
                }
            } else {
                // Duplicate occurrence: validate consistency (conflicts fail the install).
                this.configurator.validateDuplicateConfiguration(SECURITY_CONSTRAINT_KEY, USER_DATA_CONSTRAINT_KEY, String.valueOf(transportGuarantee), existingUserDataConstraint);
                // ignore user-data-constraint specified in web-fragments, since it's already specified in web.xml
                if (ConfigSource.WEB_FRAGMENT == this.configurator.getConfigSource() && ConfigSource.WEB_XML == existingUserDataConstraint.getSource()) {
                    return false;
                }
            }
        }
        return sslRequired;
    }
}
public class JBossClassLoaderHandler {
    /**
     * Handle a JBoss module: reflectively walk its resource loaders and append their
     * paths to the classpath order. Each module is processed at most once.
     *
     * @param module the module
     * @param visitedModules visited modules (duplicate/cycle guard)
     * @param classLoader fallback classloader used when the module reports none
     * @param classpathOrderOut the classpath order to append to
     * @param scanSpec the scan spec
     * @param log the log
     */
    private static void handleRealModule(final Object module, final Set<Object> visitedModules, final ClassLoader classLoader, final ClasspathOrder classpathOrderOut, final ScanSpec scanSpec, final LogNode log) {
        if (!visitedModules.add(module)) {
            // Avoid extracting paths from the same module more than once
            return;
        }
        ClassLoader moduleLoader = (ClassLoader) ReflectionUtils.invokeMethod(module, "getClassLoader", false);
        if (moduleLoader == null) {
            moduleLoader = classLoader;
        }
        // type VFSResourceLoader[]
        final Object vfsResourceLoaders = ReflectionUtils.invokeMethod(moduleLoader, "getResourceLoaders", false);
        if (vfsResourceLoaders != null) {
            for (int i = 0, n = Array.getLength(vfsResourceLoaders); i < n; i++) {
                // type JarFileResourceLoader for jars, VFSResourceLoader for exploded jars, PathResourceLoader
                // for resource directories, or NativeLibraryResourceLoader for (usually non-existent) native
                // library "lib/" dirs adjacent to the jarfiles that they were presumably extracted from.
                final Object resourceLoader = Array.get(vfsResourceLoaders, i);
                // Could skip NativeLibraryResourceLoader instances altogether, but testing for their existence
                // only seems to add about 3% to the total scan time.
                // if (!resourceLoader.getClass().getSimpleName().equals("NativeLibraryResourceLoader")) {
                handleResourceLoader(resourceLoader, moduleLoader, classpathOrderOut, scanSpec, log);
            }
        }
    }
}
public class S3ProxyHandler { /** * which XML 1.0 cannot represent . */
private static String encodeBlob ( String encodingType , String blobName ) { } } | if ( encodingType != null && encodingType . equals ( "url" ) ) { return urlEscaper . escape ( blobName ) ; } else { return blobName ; } |
public class NoETagFilter {
    /**
     * Disables the {@code ETag} HTTP header by wrapping the response and silently
     * dropping any attempt to set that header; all other headers pass through.
     * {@inheritDoc}
     */
    @Override
    public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) throws IOException, ServletException {
        filterChain.doFilter(servletRequest, new HttpServletResponseWrapper((HttpServletResponse) servletResponse) {
            @Override
            public void setHeader(String name, String value) {
                if (!HTTPCacheHeader.ETAG.getName().equalsIgnoreCase(name)) {
                    super.setHeader(name, value);
                }
            }
        });
    }
}
public class JavaUtils {
    /**
     * True if the given class is a primitive type or an array (of any depth)
     * whose ultimate component type is primitive.
     */
    public static boolean isPrimitive(Class<?> javaType) {
        Class<?> current = javaType;
        while (current.isArray()) {
            current = current.getComponentType();
        }
        return current.isPrimitive();
    }
}
public class ParameterReferenceImpl {
    /**
     * EMF-generated reflective setter: routes PARAMETER_REFERENCE__PARAMETER to
     * {@code setParameter}; all other feature ids fall through to the superclass.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case XtextPackage.PARAMETER_REFERENCE__PARAMETER:
                setParameter((Parameter) newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }
}
public class SchedulerBase {
    /**
     * This method is invoked by {@link #addWithAutomaticPhasing(Schedulable, int)} and calculates
     * the best phase based on the number of frames of the dry run. The optimal phase is guaranteed
     * if the number of simulated frames is at least as large as the least common multiple (LCM,
     * see {@link ArithmeticUtils#lcmPositive(int, int)}) of all frequency values used in the
     * scheduler so far.
     *
     * @param frequency the frequency of the schedulable task to add
     * @return the best phase based on the length of the dry run
     */
    protected int calculatePhase(int frequency) {
        if (frequency > phaseCounters.size)
            phaseCounters.ensureCapacity(frequency - phaseCounters.size);
        int[] items = phaseCounters.items;
        // Reset counters
        phaseCounters.size = frequency;
        for (int i = 0; i < frequency; i++)
            items[i] = 0;
        // Perform a dry run: simulate dryRunFrames frames, counting per phase slot
        // how many already-registered tasks would fire in that slot.
        for (int frame = 0; frame < dryRunFrames; frame++) {
            int slot = frame % frequency;
            // Go through each task
            for (int i = 0; i < schedulableRecords.size; i++) {
                T record = schedulableRecords.get(i);
                // If it is due, count it
                if ((frame - record.phase) % record.frequency == 0)
                    items[slot]++;
            }
        }
        // Pick the least-loaded slot as the new task's phase.
        int minValue = Integer.MAX_VALUE;
        int minValueAt = -1;
        for (int i = 0; i < frequency; i++) {
            if (items[i] < minValue) {
                minValue = items[i];
                minValueAt = i;
            }
        }
        // Return the phase
        return minValueAt;
    }
}
public class URBridgeEntity {
    /**
     * Returns the uniqueId of a user or a group.
     *
     * @param setAttr whether to actually set the attribute in the entity or just perform a lookup
     * @return the user or group's uniqueId
     * @throws Exception if identifier values are missing or the underlying registry threw an error
     */
    public String getUniqueId(boolean setAttr) throws Exception {
        String uniqueName = null;
        String uniqueId = null;
        // Short-circuit: the identifier already carries a uniqueId.
        if (entity.getIdentifier().isSet(uniqueIdProp)) {
            uniqueId = (String) entity.getIdentifier().get(uniqueIdProp);
            return uniqueId;
        }
        uniqueName = (String) entity.getIdentifier().get(securityNameProp);
        if ((uniqueId == null) && (uniqueName == null)) {
            // Neither a uniqueId nor a security name is available — cannot resolve.
            throw new WIMApplicationException(WIMMessageKey.REQUIRED_IDENTIFIERS_MISSING, Tr.formatMessage(tc, WIMMessageKey.REQUIRED_IDENTIFIERS_MISSING));
        }
        // Get the attribute value. If it's part of the DN we must strip it out.
        // ZZZZ uniqueName = stripRDN(uniqueName);
        uniqueId = getUniqueIdForEntity(uniqueName);
        if (setAttr) {
            // Set the attribute in the WIM entity.
            entity.getIdentifier().set(uniqueIdProp, uniqueId);
        }
        return uniqueId;
    }
}
public class CmsIdentifiableObjectContainer { /** * Returns the list of objects . < p >
* @ return the a list of < code > { @ link Object } < / code > s . */
public List < T > elementList ( ) { } } | if ( m_cache != null ) { return m_cache ; } if ( m_relativeOrdered ) { List < T > objectList = new ArrayList < T > ( ) ; Iterator < CmsIdObjectElement < T > > itObjs = m_orderedObjectList . iterator ( ) ; while ( itObjs . hasNext ( ) ) { CmsIdObjectElement < T > object = itObjs . next ( ) ; objectList . add ( object . getObject ( ) ) ; } m_cache = Collections . unmodifiableList ( objectList ) ; } else { m_cache = Collections . unmodifiableList ( m_objectList ) ; } return m_cache ; |
public class WhileyFileParser {
    /**
     * Parse a type declaration in a Whiley source file, which has the form:
     *
     * <pre>
     * "type" Identifier "is" TypePattern ("where" Expr)*
     * </pre>
     *
     * Here, the type pattern specifies a type which may additionally be adorned with variable
     * names. The "where" clause is optional and is often referred to as the type's "constraint";
     * variables defined within the type pattern may be used within the constraint expressions.
     * A simple example:
     *
     * <pre>
     * type nat is (int x) where x >= 0
     * </pre>
     *
     * This defines a <i>constrained type</i> called <code>nat</code> representing the natural
     * numbers. Type declarations may also have modifiers such as <code>public</code> and
     * <code>private</code>.
     *
     * @see wyil.lang.WyilFile.Type
     * @param modifiers the list of modifiers for this declaration (already parsed before this
     *            method was called)
     */
    public Decl.Type parseTypeDeclaration(Tuple<Modifier> modifiers) {
        int start = index;
        EnclosingScope scope = new EnclosingScope();
        match(Identifier);  // consumes the "type" keyword token
        // Parse type name
        Identifier name = parseIdentifier();
        // Parse template variables
        Tuple<Template.Variable> template = parseOptionalTemplate(scope);
        match(Is);
        // Parse the type pattern
        Decl.Variable var = parseOptionalParameter(scope);
        addFieldAliases(var, scope);
        Tuple<Expr> invariant = parseInvariant(scope, Where);
        int end = index;  // NOTE(review): end is unused in the visible code
        matchEndLine();
        return annotateSourceLocation(new Decl.Type(modifiers, name, template, var, invariant), start);
    }
}
public class ContrastSwatch {
    /**
     * Set the fields mBackgroundColor, mForegroundColor, mBackgroundLuminance and
     * mForegroundLuminance based upon the color histogram.
     *
     * @param colorHistogram map from packed color value to its pixel frequency
     */
    private void extractFgBgData(Map<Integer, Integer> colorHistogram) {
        if (colorHistogram.isEmpty()) {
            // An empty histogram indicates we've encountered a 0px area image. It has no luminance.
            mBackgroundLuminance = mForegroundLuminance = 0;
            mBackgroundColor = Color.BLACK;
            mForegroundColor = Color.BLACK;
        } else if (colorHistogram.size() == 1) {
            // Deal with views that only contain a single color
            final int singleColor = colorHistogram.keySet().iterator().next();
            mBackgroundLuminance = mForegroundLuminance = ContrastUtils.calculateLuminance(singleColor);
            mForegroundColor = singleColor;
            mBackgroundColor = singleColor;
        } else {
            // Find the average luminance value within the set of luminances for
            // purposes of splitting luminance values into high-luminance and
            // low-luminance buckets. This is explicitly not a weighted average.
            double luminanceSum = 0;
            for (int color : colorHistogram.keySet()) {
                luminanceSum += ContrastUtils.calculateLuminance(color);
            }
            final double averageLuminance = luminanceSum / colorHistogram.size();
            // Select the highest and lowest luminance values that contribute to
            // most number of pixels in the image -- our background and
            // foreground colors.
            double lowLuminanceContributor = 0.0d;
            double highLuminanceContributor = 1.0d;
            int lowLuminanceColor = -1;
            int highLuminanceColor = -1;
            int maxLowLuminanceFrequency = 0;
            int maxHighLuminanceFrequency = 0;
            for (Entry<Integer, Integer> entry : colorHistogram.entrySet()) {
                final int color = entry.getKey();
                final double luminanceValue = ContrastUtils.calculateLuminance(color);
                final int frequency = entry.getValue();
                if ((luminanceValue < averageLuminance) && (frequency > maxLowLuminanceFrequency)) {
                    lowLuminanceContributor = luminanceValue;
                    maxLowLuminanceFrequency = frequency;
                    lowLuminanceColor = color;
                } else if ((luminanceValue >= averageLuminance) && (frequency > maxHighLuminanceFrequency)) {
                    highLuminanceContributor = luminanceValue;
                    maxHighLuminanceFrequency = frequency;
                    highLuminanceColor = color;
                }
            }
            // Background luminance is that which occurs more frequently
            if (maxHighLuminanceFrequency > maxLowLuminanceFrequency) {
                mBackgroundLuminance = highLuminanceContributor;
                mBackgroundColor = highLuminanceColor;
                mForegroundLuminance = lowLuminanceContributor;
                mForegroundColor = lowLuminanceColor;
            } else {
                mBackgroundLuminance = lowLuminanceContributor;
                mBackgroundColor = lowLuminanceColor;
                mForegroundLuminance = highLuminanceContributor;
                mForegroundColor = highLuminanceColor;
            }
        }
    }
}
public class FileUtils {
    /**
     * Normalize topic path based on current directory and href value, by replacing "\\" and "/"
     * with {@link File#separator}, and removing "." and ".." from the file path, with no change
     * to the substring behind "#": the fragment is stripped before resolution and re-attached
     * afterwards.
     *
     * @param rootPath root directory path
     * @param relativePath relative path, possibly carrying a "#fragment" suffix
     * @return resolved topic file path
     */
    @Deprecated
    public static String resolveTopic(final File rootPath, final String relativePath) {
        return setFragment(resolve(rootPath, stripFragment(relativePath)).getPath(), getFragment(relativePath));
    }
}
public class MBeanAccessChecker {
    /**
     * Checks whether a value matches any '*'-containing pattern in {@code pValues}.
     * Patterns without a '*' never match here (exact matches are assumed to be
     * handled by the caller, preserved from the original contract).
     *
     * @param pValues set of patterns; '*' matches any run of characters
     * @param pValue the value to test
     * @return true if some wildcard pattern matches
     */
    private boolean wildcardMatch(Set<String> pValues, String pValue) {
        for (String pattern : pValues) {
            if (pattern.contains("*") && matchesWildcard(pattern, pValue)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Glob-style match: '*' matches any (possibly empty) run of characters; every other
     * character matches literally. The previous implementation converted the pattern to a
     * regex via {@code replaceAll("\\*", ".*")}, which left other regex metacharacters
     * live — '.' matched any character (too loose for an access check) and characters
     * such as '(' threw PatternSyntaxException.
     */
    static boolean matchesWildcard(String pattern, String value) {
        final String[] parts = pattern.split("\\*", -1);
        if (!value.startsWith(parts[0])) {
            return false;
        }
        int pos = parts[0].length();
        // Locate each middle segment, in order, after the previous one.
        for (int i = 1; i < parts.length - 1; i++) {
            final int idx = value.indexOf(parts[i], pos);
            if (idx < 0) {
                return false;
            }
            pos = idx + parts[i].length();
        }
        final String tail = parts[parts.length - 1];
        // The trailing segment must fit after the last matched position.
        return value.length() - pos >= tail.length() && value.endsWith(tail);
    }
}
public class ObjectStreamField { /** * Compares this field descriptor to the specified one . Checks first if one
* of the compared fields has a primitive type and the other one not . If so ,
* the field with the primitive type is considered to be " smaller " . If both
* fields are equal , their names are compared .
* @ param o
* the object to compare with .
* @ return - 1 if this field is " smaller " than field { @ code o } , 0 if both
* fields are equal ; 1 if this field is " greater " than field { @ code */
public int compareTo ( Object o ) { } } | ObjectStreamField f = ( ObjectStreamField ) o ; boolean thisPrimitive = this . isPrimitive ( ) ; boolean fPrimitive = f . isPrimitive ( ) ; // If one is primitive and the other isn ' t , we have enough info to
// compare
if ( thisPrimitive != fPrimitive ) { return thisPrimitive ? - 1 : 1 ; } // Either both primitives or both not primitives . Compare based on name .
return this . getName ( ) . compareTo ( f . getName ( ) ) ; |
public class OJBSearchFilter { /** * Change the search filter to one that specifies an element to not
* match one single value .
* The old search filter is deleted .
* @ param elementName is the name of the element to be matched
* @ param value is the value to not be matched
* @ param oper is the IN or NOT _ IN operator to indicate how to matche */
public void matchValue ( String elementName , String value , int oper ) { } } | // Delete the old search criteria
criteria = new Criteria ( ) ; if ( oper != NOT_IN ) { criteria . addEqualTo ( elementName , value ) ; } else { criteria . addNotEqualTo ( elementName , value ) ; } |
public class MtasBufferedReader {
    /**
     * Reads one line of text, honoring '\n', '\r' and "\r\n" terminators.
     * Mirrors the classic BufferedReader.readLine(boolean) implementation.
     *
     * @param ignoreLF if true, a leading '\n' left over from a previous '\r' is skipped
     * @return the line without its terminator, or null at end of stream
     * @throws IOException Signals that an I/O exception has occurred.
     */
    String readLine(boolean ignoreLF) throws IOException {
        StringBuffer s = null;      // accumulates a line that spans buffer refills
        int startChar;
        synchronized (lock) {
            ensureOpen();
            boolean omitLF = ignoreLF || skipLF;
            for (;;) {
                if (nextChar >= nChars)
                    fill();
                if (nextChar >= nChars) { /* EOF */
                    // Return whatever was accumulated, or null when nothing was read.
                    if (s != null && s.length() > 0)
                        return s.toString();
                    else
                        return null;
                }
                boolean eol = false;
                char c = 0;
                int i;
                /* Skip a leftover '\n', if necessary */
                if (omitLF && (cb[nextChar] == '\n'))
                    nextChar++;
                skipLF = false;
                omitLF = false;
                // Scan the buffered chars for a line terminator.
                charLoop: for (i = nextChar; i < nChars; i++) {
                    c = cb[i];
                    if ((c == '\n') || (c == '\r')) {
                        eol = true;
                        break charLoop;
                    }
                }
                startChar = nextChar;
                nextChar = i;
                if (eol) {
                    String str;
                    if (s == null) {
                        // Whole line fit in the buffer: build it directly.
                        str = new String(cb, startChar, i - startChar);
                    } else {
                        s.append(cb, startChar, i - startChar);
                        str = s.toString();
                    }
                    nextChar++;          // consume the terminator
                    if (c == '\r') {
                        skipLF = true;   // a following '\n' belongs to this line end
                    }
                    return str;
                }
                // No terminator yet: stash this chunk and refill on the next pass.
                if (s == null)
                    s = new StringBuffer(defaultExpectedLineLength);
                s.append(cb, startChar, i - startChar);
            }
        }
    }
}
public class HTTP2Session {
    /**
     * Invoked when the idle timeout triggers; the close state decides the action:
     * <ul>
     * <li>NOT_CLOSED: a real idle timeout — close only if the configured idle
     * period has truly elapsed and the idle-timeout listeners agree.</li>
     * <li>LOCALLY_CLOSED: we sent a GO_AWAY and shut down output, but the peer
     * never closed (no TCP FIN received), so we terminate.</li>
     * <li>REMOTELY_CLOSED: the peer sent a GO_AWAY but the queued disconnect was
     * never processed (e.g. TCP congestion), so we terminate.</li>
     * </ul>
     *
     * @return true if the session should be closed, false otherwise
     * @see #onGoAway(GoAwayFrame)
     * @see #close(int, String, Callback)
     * @see #onShutdown()
     */
    @Override
    public boolean onIdleTimeout() {
        switch (closed.get()) {
            case NOT_CLOSED: {
                long elapsed = Millisecond100Clock.currentTimeMillis() - idleTime;
                if (log.isDebugEnabled()) {
                    log.debug("HTTP2 session on idle timeout. The elapsed time is {} - {}", elapsed, endPoint.getMaxIdleTimeout());
                }
                // Close only when truly idle for the full period AND no listener vetoes.
                return elapsed >= endPoint.getMaxIdleTimeout() && notifyIdleTimeout(this);
            }
            case LOCALLY_CLOSED:
            case REMOTELY_CLOSED: {
                // Half-closed either way: the peer will not complete the close,
                // so terminate the session ourselves.
                abort(new TimeoutException("Idle timeout " + endPoint.getMaxIdleTimeout() + " ms"));
                return false;
            }
            default: {
                return false;
            }
        }
    }
}
public class DataSiftManagedSource {
    /**
     * Starts a configured managed source.
     *
     * @param source the managed source to start (possibly still an unresolved future)
     * @return a future holding the API result of the start request
     * @throws IllegalArgumentException if {@code source} is null
     */
    public FutureData<DataSiftResult> start(final FutureData<ManagedSource> source) {
        if (source == null) {
            throw new IllegalArgumentException("A data source is required");
        }
        final FutureData<DataSiftResult> future = new FutureData<>();
        final DataSiftResult res = new BaseDataSiftResult();
        // Wait for the source future to resolve, then POST its id to the START endpoint.
        unwrapFuture(source, future, res, new FutureResponse<ManagedSource>() {
            public void apply(ManagedSource data) {
                URI uri = newParams().forURL(config.newAPIEndpointURI(START));
                POST request = config.http()
                        .POST(uri, new PageReader(newRequestCallback(future, data, config)))
                        .form("id", data.getId());
                performRequest(future, request);
            }
        });
        return future;
    }
}
public class GelfLayout { /** * Add a staticAdditional field . This is mainly here for compatibility with logback . xml
* @ param keyValue This must be in format key : value where key is the additional field key , and value is a static
* string . e . g " _ node _ name : www013"
* @ deprecated Use addStaticField instead */
public void addStaticAdditionalField ( String keyValue ) { } } | String [ ] splitted = keyValue . split ( ":" ) ; if ( splitted . length != 2 ) { throw new IllegalArgumentException ( "staticAdditionalField must be of the format key:value, where key is the " + "additional field key (therefore should have a leading underscore), and value is a static string. " + "e.g. _node_name:www013" ) ; } staticFields . put ( splitted [ 0 ] , splitted [ 1 ] ) ; |
public class GridCell { /** * Add an element in the cell .
* @ param element the element .
* @ return < code > true < / code > if the element is added ;
* otherwise < code > false < / code > . */
public boolean addElement ( GridCellElement < P > element ) { } } | if ( element != null && this . elements . add ( element ) ) { if ( element . addCellLink ( this ) ) { ++ this . referenceElementCount ; } return true ; } return false ; |
public class LBiObjLongFunctionBuilder { /** * One of ways of creating builder . This might be the only way ( considering all _ functional _ builders ) that might be utilize to specify generic params only once . */
@ Nonnull public static < T1 , T2 , R > LBiObjLongFunctionBuilder < T1 , T2 , R > biObjLongFunction ( Consumer < LBiObjLongFunction < T1 , T2 , R > > consumer ) { } } | return new LBiObjLongFunctionBuilder ( consumer ) ; |
public class ServletRESTRequestWithParams { /** * ( non - Javadoc )
* @ see com . ibm . wsspi . rest . handler . RESTRequest # getInputStream ( ) */
@ Override public InputStream getInputStream ( ) throws IOException { } } | ServletRESTRequestImpl ret = castRequest ( ) ; if ( ret != null ) return ret . getInputStream ( ) ; return null ; |
public class ReferenceField { /** * Get the IconField from this record . */
public ImageField getIconField ( Record record ) { } } | if ( record == null ) record = this . getReferenceRecord ( ) ; for ( int i = 0 ; i < record . getFieldCount ( ) ; i ++ ) { BaseField field = record . getField ( i ) ; if ( field instanceof ImageField ) return ( ImageField ) field ; } return null ; // No icon in this status record |
public class MicroProfileJwtConfigImpl {
    /**
     * Lazily creates the {@link ConsumerUtils} from the MicroProfile JWT service.
     * Returns {@code null} (after logging a warning) while the service is not
     * yet available.
     * NOTE(review): the lazy init is not synchronized — presumably only invoked
     * from a single thread; confirm against the callers.
     *
     * {@inheritDoc}
     */
    @Override
    public ConsumerUtils getConsumerUtils() {
        if (consumerUtils == null) { // lazy init
            MicroProfileJwtService service = mpJwtServiceRef.getService();
            if (service != null) {
                consumerUtils = new ConsumerUtils(service.getKeyStoreServiceRef());
            } else {
                // Service not wired (yet): warn so the admin can diagnose.
                Tr.warning(tc, "SERVICE_NOT_FOUND_JWT_CONSUMER_NOT_AVAILABLE", new Object[] { uniqueId });
            }
        }
        return consumerUtils;
    }
}
public class WeakObjectRegistry { /** * Returns the object that is identified by an id .
* @ param id
* The id of the object that should be returned .
* @ return The object if one is registered by this id or an empty { @ link Optional } if not . */
public Optional < Object > getById ( final UUID id ) { } } | return Optional . ofNullable ( idToObject . get ( id ) ) ; |
public class DoubleIntIndex {
    /**
     * Modifies the key of an existing pair.
     *
     * @param i   the index of the pair
     * @param key the new key
     * @throws IndexOutOfBoundsException if {@code i} is not in [0, count)
     */
    public synchronized void setKey(int i, int key) {
        if (i < 0 || i >= count) {
            throw new IndexOutOfBoundsException();
        }
        // Rewriting a key invalidates key ordering; when the index is sorted on
        // values, the ordering is untouched.
        if (!sortOnValues) {
            sorted = false;
        }
        keys[i] = key;
    }
}
public class AviaterRegexFilter {
    /**
     * Anchors every pattern with '^' and '$' to force whole-string matches.
     *
     * Even when patterns are sorted by decreasing length, partial matching can
     * still go wrong: with "foooo|f.*t" against "fooooot", the "foooo"
     * alternative may match a prefix although the lengths differ. Anchoring the
     * head and tail of every pattern avoids that.
     *
     * @param patterns the raw regex patterns
     * @return a new list containing each pattern wrapped as ^pattern$
     */
    private List<String> completionPattern(List<String> patterns) {
        // Presize the result; use StringBuilder since this buffer is purely
        // local and needs no synchronization.
        List<String> result = new ArrayList<String>(patterns.size());
        for (String pattern : patterns) {
            StringBuilder anchored = new StringBuilder(pattern.length() + 2);
            anchored.append('^').append(pattern).append('$');
            result.add(anchored.toString());
        }
        return result;
    }
}
public class Shape {
    /**
     * Returns true if the shapes are equal after removing any size-1 dimensions.
     * For example, [1,3,4] and [3,4] are considered equal, as are [2,1,1] and [1,2].
     *
     * @param shape1 first shape
     * @param shape2 second shape
     * @return true when the squeezed shapes match
     */
    public static boolean shapeEqualWithSqueeze(long[] shape1, long[] shape2) {
        if (shape1 == null) {
            return shape2 == null;
        }
        if (shape2 == null) {
            return false;
        }
        int i = 0;
        int j = 0;
        // Advance both cursors, skipping size-1 dimensions on either side.
        while (i < shape1.length && j < shape2.length) {
            if (shape1[i] == 1) {
                i++;
            } else if (shape2[j] == 1) {
                j++;
            } else if (shape1[i] != shape2[j]) {
                // Both dimensions are non-1 and differ: shapes cannot match.
                return false;
            } else {
                i++;
                j++;
            }
        }
        // Consume any trailing size-1 dimensions.
        while (i < shape1.length && shape1[i] == 1) {
            i++;
        }
        while (j < shape2.length && shape2[j] == 1) {
            j++;
        }
        // Equal iff both shapes are fully consumed.
        return i == shape1.length && j == shape2.length;
    }
}
public class RunMojo {
    /**
     * Adds the project's build output and dependency artifacts to the classpath,
     * taking includeProjectDependencies into consideration.
     *
     * @param path classpath of {@link java.net.URL} objects to append to
     * @throws MojoExecutionException if a file path cannot be converted to a URL
     */
    private void addRelevantProjectDependenciesToClasspath(Set<URL> path) throws MojoExecutionException {
        if (this.includeProjectDependencies) {
            try {
                getLog().debug("Project Dependencies will be included.");
                // The project's own compiled classes go on the classpath first.
                URL mainClasses = new File(project.getBuild().getOutputDirectory()).toURI().toURL();
                getLog().debug("Adding to classpath : " + mainClasses);
                path.add(mainClasses);
                Set<Artifact> dependencies = CastUtils.cast(project.getArtifacts());
                // system scope dependencies are not returned by maven 2.0. See MEXEC-17
                dependencies.addAll(getAllNonTestScopedDependencies());
                Iterator<Artifact> iter = dependencies.iterator();
                while (iter.hasNext()) {
                    Artifact classPathElement = iter.next();
                    getLog().debug("Adding project dependency artifact: " + classPathElement.getArtifactId() + " to classpath");
                    // Artifacts without a resolved file are skipped.
                    File file = classPathElement.getFile();
                    if (file != null) {
                        path.add(file.toURI().toURL());
                    }
                }
            } catch (MalformedURLException e) {
                throw new MojoExecutionException("Error during setting up classpath", e);
            }
        } else {
            getLog().debug("Project Dependencies will be excluded.");
        }
    }
}
public class JsGeometryEditor { /** * Add the list of snapping rules as they are configured for a specific layer within the XML configuration .
* @ param layerId The vector layer to use the configuration from . */
public void addLayerSnappingRules ( String layerId ) { } } | Layer < ? > layer = mapWidget . getMapModel ( ) . getLayer ( layerId ) ; if ( layer != null && layer instanceof VectorLayer ) { VectorLayer vLayer = ( VectorLayer ) layer ; for ( SnappingRuleInfo snappingRuleInfo : vLayer . getLayerInfo ( ) . getSnappingRules ( ) ) { SnapRuleUtil . addRule ( delegate . getSnappingService ( ) , mapWidget , snappingRuleInfo ) ; } } |
public class AmazonLightsailClient {
    /**
     * Returns information about all of your databases in Amazon Lightsail.
     * Thin generated wrapper: runs the request through the pre-execution
     * handlers and delegates to the operation executor.
     *
     * @param request the GetRelationalDatabases request
     * @return result of the GetRelationalDatabases operation returned by the service
     * @throws ServiceException a general service exception
     * @throws InvalidInputException when user input does not conform to the validation rules
     *         (note: domain-related APIs are only available in us-east-1)
     * @throws NotFoundException when a resource cannot be found
     * @throws OperationFailureException when an operation fails to execute
     * @throws AccessDeniedException when the user cannot be authenticated or uses invalid credentials
     * @throws AccountSetupInProgressException when an account is still in the setup-in-progress state
     * @throws UnauthenticatedException when the user has not been authenticated
     * @sample AmazonLightsail.GetRelationalDatabases
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/lightsail-2016-11-28/GetRelationalDatabases"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public GetRelationalDatabasesResult getRelationalDatabases(GetRelationalDatabasesRequest request) {
        // Apply request handlers/customizations before dispatching.
        request = beforeClientExecution(request);
        return executeGetRelationalDatabases(request);
    }
}
public class SBT012Analysis {
    /**
     * {@inheritDoc}
     *
     * Persists this analysis into the given cache file, keeping the compile
     * setup that is already stored alongside it.
     */
    @Override
    public void writeToFile(File analysisCacheFile) {
        AnalysisStore analysisStore = Compiler.analysisStore(analysisCacheFile);
        // Reuse the compile setup already present in the store (tuple element _2).
        // NOTE(review): get().get() assumes the store already holds an entry —
        // confirm the cache file always exists when this is called.
        analysisStore.set(analysis, analysisStore.get().get()._2 /* compileSetup */);
    }
}
public class DependencyAnalyzer {
    /**
     * Analyzes the dependencies for all classes in the directory and its sub
     * directories, collecting violations into {@code dependencyErrors}.
     *
     * @param classesDir directory where the "*.class" files are located
     *        (something like "bin" or "classes")
     */
    public final void analyze(final File classesDir) {
        final FileProcessor fileProcessor = new FileProcessor(new FileHandler() {
            @Override
            public final FileHandlerResult handleFile(final File classFile) {
                // Only compiled class files are of interest.
                if (!classFile.getName().endsWith(".class")) {
                    return FileHandlerResult.CONTINUE;
                }
                try {
                    final ClassInfo classInfo = new ClassInfo(classFile);
                    // Route the class by its package: allowed rules, forbidden
                    // rules, or the always-forbidden rules when neither matches.
                    final Package<DependsOn> allowedPkg = dependencies.findAllowedByName(classInfo.getPackageName());
                    if (allowedPkg == null) {
                        final Package<NotDependsOn> forbiddenPkg = dependencies.findForbiddenByName(classInfo.getPackageName());
                        if (forbiddenPkg == null) {
                            dependencyErrors.addAll(checkAlwaysForbiddenSection(dependencies, classInfo));
                        } else {
                            dependencyErrors.addAll(checkForbiddenSection(dependencies, forbiddenPkg, classInfo));
                        }
                    } else {
                        dependencyErrors.addAll(checkAllowedSection(dependencies, allowedPkg, classInfo));
                    }
                } catch (final IOException ex) {
                    throw new RuntimeException("Error handling file: " + classFile, ex);
                }
                return FileHandlerResult.CONTINUE;
            }
        });
        // Start from a clean error list for this run, then walk the tree.
        dependencyErrors.clear();
        fileProcessor.process(classesDir);
    }
}
public class Stapler { /** * For XSS prevention , escape unsafe characters in String by default ,
* unless it ' s specifically wrapped in { @ link RawHtmlArgument } . */
public static Object htmlSafeArgument ( Object o ) { } } | if ( o instanceof RawHtmlArgument ) return ( ( RawHtmlArgument ) o ) . getValue ( ) ; if ( o instanceof Number || o instanceof Calendar || o instanceof Date ) // formatting numbers and date often requires that they be kept intact
return o ; if ( o == null ) return o ; return escape ( o . toString ( ) ) ; |
public class CmsSolrQuery { /** * Creates a OR combined ' q ' parameter . < p >
* @ param text the query string .
* @ return returns the ' q ' parameter */
private String createTextQuery ( String text ) { } } | if ( m_textSearchFields . isEmpty ( ) ) { m_textSearchFields . add ( CmsSearchField . FIELD_TEXT ) ; } String q = "{!q.op=OR type=" + getRequestHandler ( ) + " qf=" ; boolean first = true ; for ( String textField : m_textSearchFields ) { if ( ! first ) { q += " " ; } q += textField ; } q += "}" + text ; return q ; |
public class ServletStartedListener { /** * Constructs a list of SecurityConstraint objects from the given ServletSecurityElement and list of URL patterns .
* @ param securityMetadataFromDD the security metadata processed from the deployment descriptor , for updating the roles
* @ param servletSecurity the ServletSecurityElement that represents the information parsed from the @ ServletSecurity annotation
* @ param urlPatterns the list of URL patterns defined in the @ WebServlet annotation
* @ return a list of SecurityConstraint objects */
private List < SecurityConstraint > createSecurityConstraints ( SecurityMetadata securityMetadataFromDD , ServletSecurityElement servletSecurity , Collection < String > urlPatterns ) { } } | List < SecurityConstraint > securityConstraints = new ArrayList < SecurityConstraint > ( ) ; securityConstraints . add ( getConstraintFromHttpElement ( securityMetadataFromDD , urlPatterns , servletSecurity ) ) ; securityConstraints . addAll ( getConstraintsFromHttpMethodElement ( securityMetadataFromDD , urlPatterns , servletSecurity ) ) ; return securityConstraints ; |
public class ImgUtil {
    /**
     * Writes an image in PNG format to the given stream.
     *
     * @param image the {@link Image} to write
     * @param destImageStream the destination image output stream
     * @throws IORuntimeException on I/O errors
     */
    public static void writePng(Image image, ImageOutputStream destImageStream) throws IORuntimeException {
        write(image, IMAGE_TYPE_PNG, destImageStream);
    }
}
public class Matching {
    /**
     * Counts the number of child nodes in the subtrees of x and y that are also
     * in the matching: descendants of {@code paramNodeX} whose partner lies in
     * the subtree rooted at {@code paramNodeY}.
     *
     * @param paramNodeX first subtree root node (must not be null)
     * @param paramNodeY second subtree root node (must not be null)
     * @return number of children which have been matched
     * @throws TTIOException on transaction I/O errors
     */
    public long containedChildren(final ITreeData paramNodeX, final ITreeData paramNodeY) throws TTIOException {
        assert paramNodeX != null;
        assert paramNodeY != null;
        long retVal = 0;
        // Walk x's subtree (self included) and test each node's partner for
        // membership in y's subtree.
        mRtxOld.moveTo(paramNodeX.getDataKey());
        for (final AbsAxis axis = new DescendantAxis(mRtxOld, true); axis.hasNext(); axis.next()) {
            retVal += mIsInSubtree.get(paramNodeY, partner(mRtxOld.getNode())) ? 1 : 0;
        }
        return retVal;
    }
}
public class UICommand {
    /**
     * A method-binding EL expression that identifies the action listener method
     * to be invoked when this component is activated by the user. The listener
     * accepts a javax.faces.event.ActionEvent and returns void; the phase the
     * event fires in can be controlled via the immediate attribute.
     *
     * @return the action listener method binding, or null when unset
     * @deprecated
     */
    @JSFProperty(stateHolder = true, returnSignature = "void", methodSignature = "javax.faces.event.ActionEvent")
    public MethodBinding getActionListener() {
        // The value lives in the StateHelper so it takes part in state saving.
        return (MethodBinding) getStateHelper().eval(PropertyKeys.actionListener);
    }
}
public class RpcConfigs {
    /**
     * Tells whether a configuration value changed between two snapshots.
     *
     * @param oldObj the old value (may be null)
     * @param newObj the new value (may be null)
     * @return true if the values differ (null-safe), false otherwise
     */
    protected static boolean changed(Object oldObj, Object newObj) {
        // Null-safe inequality via the standard library (fully qualified so no
        // import is needed); semantics are identical to the hand-rolled ternary.
        return !java.util.Objects.equals(oldObj, newObj);
    }
}
public class MainWindowController {
    /**
     * Recursively populates the GridPane with one row per menu item, starting at
     * ROOT and descending through every submenu: submenus get a single bold
     * label spanning both columns and a recursive call with a larger inset,
     * while leaf items get a name label plus a control to display/edit the value.
     *
     * @param subMenu the menu level to render
     * @param inset left padding in pixels; grows per nesting level to mimic a tree
     * @param gridPosition the grid row to start filling
     * @return the next free grid row after this level has been rendered
     */
    private int buildGrid(SubMenuItem subMenu, int inset, int gridPosition) {
        // while there are more menu items in the current level
        for (MenuItem item : menuTree.getMenuItems(subMenu)) {
            if (item.hasChildren()) {
                // Submenu: render a single spanning label, then recurse one level deeper.
                Label itemLbl = new Label("SubMenu " + item.getName());
                itemLbl.setPadding(new Insets(12, 10, 12, inset));
                itemLbl.setStyle("-fx-font-weight: bold; -fx-font-size: 120%;");
                itemGrid.add(itemLbl, 0, gridPosition++, 2, 1);
                gridPosition = buildGrid(MenuItemHelper.asSubMenu(item), inset + 10, gridPosition);
            } else {
                // Leaf item: name label plus the control that shows and edits the value.
                Label itemLbl = new Label(item.getName());
                itemLbl.setPadding(new Insets(3, 10, 3, inset));
                itemGrid.add(itemLbl, 0, gridPosition);
                itemGrid.add(createUiControlForItem(item), 1, gridPosition);
                renderItemValue(item);
                gridPosition++;
            }
        }
        return gridPosition;
    }
}
public class ContextUtils {
    /**
     * Gets the app-specific external files directory for alarms.
     *
     * @param context the context
     * @return the alarms {@link java.io.File} directory; may be null when
     *         external storage is unavailable (per the Android API contract)
     */
    @TargetApi(Build.VERSION_CODES.FROYO)
    public static File getExternalFilesDirForAlarms(Context context) {
        return context.getExternalFilesDir(Environment.DIRECTORY_ALARMS);
    }
}
public class ElementMatchers {
    /**
     * Matches a {@link ByteCodeElement} for being declared by a given
     * {@link TypeDescription}. This matcher matches a declared element's raw
     * declaring type.
     *
     * @param type the type that is expected to declare the matched byte code element
     * @param <T> the type of the matched object
     * @return a matcher for byte code elements being declared by the given {@code type}
     */
    public static <T extends ByteCodeElement> ElementMatcher.Junction<T> isDeclaredBy(TypeDescription type) {
        // Delegate to the matcher-based overload with an identity matcher.
        return isDeclaredBy(is(type));
    }
}
public class Unsigned { /** * Create a UByte [ ] from { @ code values } . */
public static UByte [ ] ubyte_a ( byte ... values ) { } } | UByte [ ] array = new UByte [ values . length ] ; for ( int i = 0 ; i < values . length ; i ++ ) { array [ i ] = ubyte ( values [ i ] ) ; } return array ; |
public class TokenTypes {
    /**
     * Returns the category of a token, a little more detailed than
     * <a href="https://docs.oracle.com/javase/specs/jls/se8/html/jls-3.5">the JLS</a>.
     *
     * @param kind the lexer token kind
     * @return the token's category
     * @throws AssertionError for lexer-internal token kinds that should never surface
     */
    public static JavaToken.Category getCategory(int kind) {
        switch (kind) {
            case RULE:
                return JavaToken.Category.KEYWORD;
            // Line terminators.
            case WINDOWS_EOL: case UNIX_EOL: case OLD_MAC_EOL:
                return JavaToken.Category.EOL;
            case EOF: case SPACE: case CTRL_Z:
                return JavaToken.Category.WHITESPACE_NO_EOL;
            case SINGLE_LINE_COMMENT: case JAVADOC_COMMENT: case MULTI_LINE_COMMENT:
                return JavaToken.Category.COMMENT;
            // Java keywords (including module-info restricted keywords).
            case ABSTRACT: case ASSERT: case BOOLEAN: case BREAK: case BYTE: case CASE: case CATCH: case CHAR:
            case CLASS: case CONST: case CONTINUE: case _DEFAULT: case DO: case DOUBLE: case ELSE: case EXTENDS:
            case FALSE: case FINAL: case FINALLY: case FLOAT: case FOR: case GOTO: case IF: case IMPLEMENTS:
            case IMPORT: case INSTANCEOF: case INT: case INTERFACE: case LONG: case NATIVE: case NEW: case NULL:
            case PACKAGE: case PRIVATE: case PROTECTED: case PUBLIC: case RETURN: case SHORT: case STATIC:
            case STRICTFP: case SUPER: case SWITCH: case SYNCHRONIZED: case THIS: case THROW: case THROWS:
            case TRANSIENT: case TRUE: case TRY: case VOID: case VOLATILE: case WHILE: case REQUIRES: case TO:
            case WITH: case OPEN: case OPENS: case USES: case MODULE: case EXPORTS: case PROVIDES: case TRANSITIVE:
                return JavaToken.Category.KEYWORD;
            // Literals (including time/number literal extensions).
            case LONG_LITERAL: case INTEGER_LITERAL: case DECIMAL_LITERAL: case HEX_LITERAL: case OCTAL_LITERAL:
            case BINARY_LITERAL: case FLOATING_POINT_LITERAL: case DECIMAL_FLOATING_POINT_LITERAL:
            case DECIMAL_EXPONENT: case HEXADECIMAL_FLOATING_POINT_LITERAL: case HEXADECIMAL_EXPONENT:
            case CHARACTER_LITERAL: case STRING_LITERAL: case MILLISECOND_LITERAL: case SECOND_LITERAL:
            case MINUTE_LITERAL: case HOUR_LITERAL: case BIG_INTEGER_LITERAL: case BIG_DECIMAL_LITERAL:
                return JavaToken.Category.LITERAL;
            case IDENTIFIER:
                return JavaToken.Category.IDENTIFIER;
            case LPAREN: case RPAREN: case LBRACE: case RBRACE: case LBRACKET: case RBRACKET: case SEMICOLON:
            case COMMA: case DOT: case AT:
                return JavaToken.Category.SEPARATOR;
            // Operators. NOTE(review): the bare numeric kinds 149/150/151 have no
            // generated constants — confirm they still match the current grammar.
            case MVEL_STARTS_WITH: case MVEL_ENDS_WITH: case MVEL_LENGTH: case NOT:
            case 150: case 151: case 149:
            // The following are tokens that are only used internally by the lexer
            case ASSIGN: case LT: case BANG: case TILDE: case HOOK: case COLON: case EQ: case LE: case GE: case NE:
            case SC_OR: case SC_AND: case INCR: case DECR: case PLUS: case MINUS: case STAR: case SLASH:
            case BIT_AND: case BIT_OR: case XOR: case REM: case LSHIFT: case PLUSASSIGN: case MINUSASSIGN:
            case STARASSIGN: case SLASHASSIGN: case ANDASSIGN: case ORASSIGN: case XORASSIGN: case REMASSIGN:
            case LSHIFTASSIGN: case RSIGNEDSHIFTASSIGN: case RUNSIGNEDSHIFTASSIGN: case ELLIPSIS: case ARROW:
            case DOUBLECOLON: case RUNSIGNEDSHIFT: case RSIGNEDSHIFT: case GT:
                return JavaToken.Category.OPERATOR;
            // These are DRLX tokens; they don't have the constants generated.
            case ENTER_JAVADOC_COMMENT: case ENTER_MULTILINE_COMMENT: case COMMENT_CONTENT: case HEX_DIGITS:
            case LETTER: case UNICODE_ESCAPE: case PART_LETTER:
            default:
                throw new AssertionError("Invalid token kind " + kind);
        }
    }
}
public class FileSystemAddons {
    /**
     * Replies a URL for the given file, translated into a resource URL when the
     * file is inside the classpath.
     *
     * @param file is the filename to translate
     * @param allowRelativePath indicates if a relative path is allowed in the returned URL
     * @return the URL corresponding to file, or <code>null</code> if the URL cannot be computed
     * @see "https://github.com/gallandarakhneorg/afc/issues/173"
     */
    @Pure
    public static URL convertFileToURL(File file, boolean allowRelativePath) {
        if (file == null) {
            return null;
        }
        try {
            File thefile = file;
            // Normalize Windows-native names; fall back to the original on failure.
            if (FileSystem.isWindowsNativeFilename(file.toString())) {
                thefile = FileSystem.normalizeWindowsNativeFilename(file.toString());
                if (thefile == null) {
                    thefile = file;
                }
            }
            final URL url;
            if (thefile.isAbsolute() || !allowRelativePath) {
                url = thefile.toURI().toURL();
            } else {
                // Keep the path relative: rebuild it segment by segment with URL
                // separators instead of letting toURI() absolutize it.
                final String[] elements = FileSystem.split(thefile);
                final StringBuilder path = new StringBuilder();
                for (final String element : elements) {
                    if (path.length() > 0) {
                        path.append(FileSystem.URL_PATH_SEPARATOR);
                    }
                    path.append(element);
                }
                url = new URL(URISchemeType.FILE.name().toLowerCase(), null, path.toString());
            }
            return FileSystem.toShortestURL(url);
        } catch (MalformedURLException e) {
            return null;
        }
    }
}
public class BeangleObjectWrapper { /** * of FM . */
@ SuppressWarnings ( "rawtypes" ) protected ModelFactory getModelFactory ( Class clazz ) { } } | if ( altMapWrapper && Map . class . isAssignableFrom ( clazz ) ) { return FriendlyMapModel . FACTORY ; } return super . getModelFactory ( clazz ) ; |
public class CertificateIssuerExtension {
    /**
     * Writes the extension to the OutputStream.
     *
     * @param out the OutputStream to write the extension to
     * @exception IOException on encoding errors
     */
    public void encode(OutputStream out) throws IOException {
        DerOutputStream tmp = new DerOutputStream();
        if (extensionValue == null) {
            // First encoding: set the OID and criticality, then DER-encode the
            // value into extensionValue via encodeThis().
            extensionId = PKIXExtensions.CertificateIssuer_Id;
            critical = true;
            encodeThis();
        }
        super.encode(tmp);
        out.write(tmp.toByteArray());
    }
}
public class JSONValue {
    /**
     * Registers a deserializer for a class on the shared default reader.
     *
     * @param type the class to register the reader for
     * @param mapper the deserializer to use for {@code type}
     */
    public static <T> void registerReader(Class<T> type, JsonReaderI<T> mapper) {
        defaultReader.registerReader(type, mapper);
    }
}
public class SpiderSession {
    /**
     * Copies the object IDs assigned in the batch result onto the corresponding
     * objects within the given DBObjectBatch. Count mismatches and out-of-order
     * results are logged as warnings rather than treated as failures.
     */
    private void copyObjectIDsToBatch(BatchResult batchResult, DBObjectBatch dbObjBatch) {
        if (batchResult.getResultObjectCount() < dbObjBatch.getObjectCount()) {
            m_logger.warn("Batch result returned fewer objects ({}) than input batch ({})", batchResult.getResultObjectCount(), dbObjBatch.getObjectCount());
        }
        // Walk both sequences in parallel; results are expected to mirror inputs.
        Iterator<ObjectResult> resultIter = batchResult.getResultObjects().iterator();
        Iterator<DBObject> objectIter = dbObjBatch.getObjects().iterator();
        while (resultIter.hasNext()) {
            if (!objectIter.hasNext()) {
                m_logger.warn("Batch result has more objects ({}) than input batch ({})!", batchResult.getResultObjectCount(), dbObjBatch.getObjectCount());
                break;
            }
            ObjectResult objResult = resultIter.next();
            DBObject dbObj = objectIter.next();
            if (Utils.isEmpty(dbObj.getObjectID())) {
                // Object had no ID yet: adopt the server-assigned one.
                dbObj.setObjectID(objResult.getObjectID());
            } else if (!dbObj.getObjectID().equals(objResult.getObjectID())) {
                m_logger.warn("Batch results out of order: expected ID '{}', got '{}'", dbObj.getObjectID(), objResult.getObjectID());
            }
        }
    }
}
public class ZealotKhala {
    /**
     * Generates a less-than-or-equal SQL fragment prefixed with "OR".
     *
     * @param field the database field
     * @param value the value to compare against
     * @return this ZealotKhala instance (for chaining)
     */
    public ZealotKhala orLessEqual(String field, Object value) {
        return this.doNormal(ZealotConst.OR_PREFIX, field, value, ZealotConst.LTE_SUFFIX, true);
    }
}
public class FaceletTaglibTypeImpl { /** * If not already created , a new < code > function < / code > element will be created and returned .
* Otherwise , the first existing < code > function < / code > element will be returned .
* @ return the instance defined for the element < code > function < / code > */
public FaceletTaglibFunctionType < FaceletTaglibType < T > > getOrCreateFunction ( ) { } } | List < Node > nodeList = childNode . get ( "function" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new FaceletTaglibFunctionTypeImpl < FaceletTaglibType < T > > ( this , "function" , childNode , nodeList . get ( 0 ) ) ; } return createFunction ( ) ; |
public class GrammarConverter {
    /**
     * Converts the AST read from the grammar file into a final grammar ready to
     * be used. Order matters: options first, then token definitions, then
     * productions, and finally the Grammar assembled from all three.
     *
     * @throws GrammarException on semantic grammar errors
     * @throws TreeException on AST traversal errors
     */
    private void convert() throws GrammarException, TreeException {
        convertOptions();
        convertTokenDefinitions();
        convertProductions();
        grammar = new Grammar(options, tokenDefinitions, productions);
    }
}
public class AmazonCloudFrontClient {
    /**
     * Creates a new origin access identity. With Amazon S3 as your origin, an
     * origin access identity lets you require users to access your content via
     * CloudFront URLs instead of S3 URLs. Thin generated wrapper: runs the
     * request through the pre-execution handlers and delegates to the executor.
     *
     * @param request the request to create a new origin access identity
     * @return result of the CreateCloudFrontOriginAccessIdentity operation
     * @throws CloudFrontOriginAccessIdentityAlreadyExistsException if the CallerReference was already
     *         used with a different CloudFrontOriginAccessIdentityConfig
     * @throws MissingBodyException if the required body (with Content-Type header) is missing
     * @throws TooManyCloudFrontOriginAccessIdentitiesException if the identity quota would be exceeded
     * @throws InvalidArgumentException if an argument is invalid
     * @throws InconsistentQuantitiesException if Quantity and the size of Items don't match
     * @sample AmazonCloudFront.CreateCloudFrontOriginAccessIdentity
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/cloudfront-2018-11-05/CreateCloudFrontOriginAccessIdentity"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public CreateCloudFrontOriginAccessIdentityResult createCloudFrontOriginAccessIdentity(CreateCloudFrontOriginAccessIdentityRequest request) {
        // Apply request handlers/customizations before dispatching.
        request = beforeClientExecution(request);
        return executeCreateCloudFrontOriginAccessIdentity(request);
    }
}
/**
 * Checks that the type of each of {@code values} is consistent with the type of
 * field {@code fieldName} declared in the Avro schema (from {@code descriptor}).
 *
 * @param schema the Avro schema declaring the field
 * @param fieldName the name of the field whose declared type is checked
 * @param values the values whose types are validated
 */
public static void checkTypeConsistency(Schema schema, String fieldName, Object... values) {
    // Delegates to the four-argument overload; the null second argument is
    // presumably an optional qualifier/record name - confirm against the overload.
    checkTypeConsistency(schema, null, fieldName, values);
}
public class DoublesSketch { /** * Puts the current sketch into the given Memory if there is sufficient space , otherwise ,
* throws an error .
* @ param dstMem the given memory .
* @ param compact if true , compacts and sorts the base buffer , which optimizes merge
* performance at the cost of slightly increased serialization time . */
public void putMemory ( final WritableMemory dstMem , final boolean compact ) { } } | if ( isDirect ( ) && ( isCompact ( ) == compact ) ) { final Memory srcMem = getMemory ( ) ; srcMem . copyTo ( 0 , dstMem , 0 , getStorageBytes ( ) ) ; } else { final byte [ ] byteArr = toByteArray ( compact ) ; final int arrLen = byteArr . length ; final long memCap = dstMem . getCapacity ( ) ; if ( memCap < arrLen ) { throw new SketchesArgumentException ( "Destination Memory not large enough: " + memCap + " < " + arrLen ) ; } dstMem . putByteArray ( 0 , byteArr , 0 , arrLen ) ; } |
public class Stax2DomBuilder { /** * / / / / Overridable helper methods : */
protected String getQualified ( String prefix , String localName ) { } } | /* * This mostly / only helps with empty / text - only elements . . . might make
* sense to do ' real ' caching . . . */
if ( localName . equals ( mLastLocalName ) && prefix . endsWith ( mLastPrefix ) ) { return mLastQName ; } String qn = prefix + ':' + localName ; mLastQName = qn ; return qn ; |
public class ConsumerDispatcher { /** * Indicates whether a subscription associated with the ConsumerDispatcher has
* been stored in the MatchSpace .
* @ return isInMatchSpace */
public boolean isInMatchSpace ( ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( tc , "isInMatchSpace" ) ; SibTr . exit ( tc , "isInMatchSpace" , Boolean . valueOf ( isInMatchSpace ) ) ; } return isInMatchSpace ; |
public class AbstractMapWritable { /** * Used by child copy constructors . */
protected synchronized void copy ( Writable other ) { } } | if ( other != null ) { try { DataOutputBuffer out = new DataOutputBuffer ( ) ; other . write ( out ) ; DataInputBuffer in = new DataInputBuffer ( ) ; in . reset ( out . getData ( ) , out . getLength ( ) ) ; readFields ( in ) ; } catch ( IOException e ) { throw new IllegalArgumentException ( "map cannot be copied: " + e . getMessage ( ) ) ; } } else { throw new IllegalArgumentException ( "source map cannot be null" ) ; } |
public class UniqueWordSequenceExtractor { /** * This method gets as input a string and returns as output a numbered sequence
* of the unique tokens . In the returned map as keys we store the position of the word
* in the original string and as value the actual unique token in that position .
* Note that the sequence includes only the position of the first occurrence of
* each word while the next occurrences are ignored .
* @ param text
* @ return */
@ Override public Map < Integer , String > extract ( final String text ) { } } | Set < String > tmpKwd = new LinkedHashSet < > ( generateTokenizer ( ) . tokenize ( text ) ) ; Map < Integer , String > keywordSequence = new LinkedHashMap < > ( ) ; int position = 0 ; for ( String keyword : tmpKwd ) { keywordSequence . put ( position , keyword ) ; ++ position ; } return keywordSequence ; |
/**
 * Determines the next timeout for the schedule expression.
 *
 * @param lastTimeout the last timeout in milliseconds, or the current time
 *            if reschedule is false
 * @param reschedule <tt>true</tt> if lastTimeout is an exclusive lower
 *            bound for the next timeout
 * @return the next timeout in milliseconds, or -1 if there are no more
 *         future timeouts for the expression
 * @throws IllegalArgumentException if the expression contains an invalid
 *             attribute value
 */
private long getTimeout(long lastTimeout, boolean reschedule) {
    Calendar cal = createCalendar(lastTimeout);

    // If this expression is being rescheduled, then add a second to make
    // progress towards the next timeout.
    if (reschedule) {
        cal.add(Calendar.SECOND, 1);
    }
    // d666295 - Otherwise, if this method is called from getFirstTimeout,
    // we're using the current time rather than the start time, and the
    // current time is mid-second, then we also want to advance to the next
    // second. Note that the parser has already guaranteed that the start
    // time has been rounded.
    else if (lastTimeout != start && lastTimeout % 1000 != 0) // d666295
    {
        // Truncate to the whole second, then step forward one full second.
        cal.set(Calendar.MILLISECOND, 0);
        cal.add(Calendar.SECOND, 1);
    }

    // advance() moves the calendar to the next time matching the expression;
    // false means there is no future timeout.
    if (!advance(cal)) {
        return -1;
    }

    return cal.getTimeInMillis();
}
/**
 * Create a new {@code Link} object with the given parameters.
 *
 * @param href the hyperlink (mandatory)
 * @param text the text of the hyperlink (optional)
 * @param type the mime type of the content, e.g. {@code image/jpeg} (optional)
 * @return a new {@code Link} object with the given parameters
 * @throws NullPointerException if the given {@code href} is {@code null}
 */
public static Link of(final URI href, final String text, final String type) {
    // Null validation of href is delegated to the Link constructor.
    return new Link(href, text, type);
}
/**
 * Authenticates the user against the default LDAP server: looks up the user's
 * entry by uid, then re-binds with the user's DN and the supplied credentials
 * to verify the password. On success sets {@code isauth}, fills in
 * {@code myPrincipal.FullName} and lower-cases {@code myPrincipal.UID}.
 * Always finishes by authenticating all subcontexts via {@code super.authenticate()}.
 *
 * @throws PortalSecurityException on an unexpected LDAP error
 */
@Override
public synchronized void authenticate() throws PortalSecurityException {
    this.isauth = false;
    ILdapServer ldapConn;
    ldapConn = LdapServices.getDefaultLdapServer();
    String creds = new String(this.myOpaqueCredentials.credentialstring);
    // Only attempt LDAP authentication when both a non-blank UID and non-blank
    // credentials were supplied; otherwise fall through to the guest-access path.
    if (this.myPrincipal.UID != null
            && !this.myPrincipal.UID.trim().equals("")
            && this.myOpaqueCredentials.credentialstring != null
            && !creds.trim().equals("")) {
        DirContext conn = null;
        NamingEnumeration results = null;
        // Build an LDAP filter of the form "(uidAttr=UID)".
        StringBuffer user = new StringBuffer("(");
        String first_name = null;
        String last_name = null;
        user.append(ldapConn.getUidAttribute()).append("=");
        user.append(this.myPrincipal.UID).append(")");
        log.debug("SimpleLdapSecurityContext: Looking for {}", user.toString());
        try {
            conn = ldapConn.getConnection();

            // set up search controls
            SearchControls searchCtls = new SearchControls();
            searchCtls.setReturningAttributes(attributes);
            searchCtls.setSearchScope(SearchControls.SUBTREE_SCOPE);

            // do lookup
            if (conn != null) {
                try {
                    results = conn.search(ldapConn.getBaseDN(), user.toString(), searchCtls);
                    if (results != null) {
                        if (!results.hasMore()) {
                            log.error("SimpleLdapSecurityContext: user not found: {}", this.myPrincipal.UID);
                        }
                        while (results != null && results.hasMore()) {
                            SearchResult entry = (SearchResult) results.next();
                            // Full DN = entry RDN + ", " + base DN.
                            StringBuffer dnBuffer = new StringBuffer();
                            dnBuffer.append(entry.getName()).append(", ");
                            dnBuffer.append(ldapConn.getBaseDN());
                            Attributes attrs = entry.getAttributes();
                            first_name = getAttributeValue(attrs, ATTR_FIRSTNAME);
                            last_name = getAttributeValue(attrs, ATTR_LASTNAME);
                            // re-bind as user: the password is verified by performing an
                            // OBJECT_SCOPE search on the user's own DN with the user's
                            // credentials; a bad password raises AuthenticationException.
                            conn.removeFromEnvironment(javax.naming.Context.SECURITY_PRINCIPAL);
                            conn.removeFromEnvironment(javax.naming.Context.SECURITY_CREDENTIALS);
                            conn.addToEnvironment(javax.naming.Context.SECURITY_PRINCIPAL, dnBuffer.toString());
                            conn.addToEnvironment(javax.naming.Context.SECURITY_CREDENTIALS,
                                    this.myOpaqueCredentials.credentialstring);
                            searchCtls = new SearchControls();
                            searchCtls.setReturningAttributes(new String[0]);
                            searchCtls.setSearchScope(SearchControls.OBJECT_SCOPE);
                            String attrSearch = "(" + ldapConn.getUidAttribute() + "=*)";
                            log.debug("SimpleLdapSecurityContext: Looking in {} for {}",
                                    dnBuffer.toString(), attrSearch);
                            conn.search(dnBuffer.toString(), attrSearch, searchCtls);
                            this.isauth = true;
                            this.myPrincipal.FullName = first_name + " " + last_name;
                            log.debug("SimpleLdapSecurityContext: User {} ({}) is authenticated",
                                    this.myPrincipal.UID, this.myPrincipal.FullName);
                            // Since LDAP is case-insensitive with respect to uid, force
                            // user name to lower case for use by the portal
                            this.myPrincipal.UID = this.myPrincipal.UID.toLowerCase();
                        } // while (results != null && results.hasMore())
                    } else {
                        log.error("SimpleLdapSecurityContext: No such user: {}", this.myPrincipal.UID);
                    }
                } catch (AuthenticationException ae) {
                    // Bad password: authentication simply fails; no exception to the caller.
                    log.info("SimpleLdapSecurityContext: Password invalid for user: " + this.myPrincipal.UID);
                } catch (Exception e) {
                    log.error("SimpleLdapSecurityContext: LDAP Error with user: " + this.myPrincipal.UID + "; ", e);
                    throw new PortalSecurityException("SimpleLdapSecurityContext: LDAP Error" + e + " with user: "
                            + this.myPrincipal.UID);
                } finally {
                    ldapConn.releaseConnection(conn);
                }
            } else {
                log.error("LDAP Server Connection unavailable");
            }
        } catch (final NamingException ne) {
            log.error("Error getting connection to LDAP server.", ne);
        }
    } else {
        // If the principal and/or credential are missing, the context authentication
        // simply fails. It should not be construed that this is an error. It happens for guest
        // access.
        log.info("Principal or OpaqueCredentials not initialized prior to authenticate");
    }
    // Ok... we are now ready to authenticate all of our subcontexts.
    super.authenticate();
    return;
}
public class DefaultExpander { /** * Add the new mapping to this expander .
* @ param mapping */
public void addDSLMapping ( final DSLMapping mapping ) { } } | for ( DSLMappingEntry entry : mapping . getEntries ( ) ) { if ( DSLMappingEntry . KEYWORD . equals ( entry . getSection ( ) ) ) { this . keywords . add ( entry ) ; } else if ( DSLMappingEntry . CONDITION . equals ( entry . getSection ( ) ) ) { this . condition . add ( entry ) ; } else if ( DSLMappingEntry . CONSEQUENCE . equals ( entry . getSection ( ) ) ) { this . consequence . add ( entry ) ; } else { // if any , then add to them both condition and consequence
this . condition . add ( entry ) ; this . consequence . add ( entry ) ; } } if ( mapping . getOption ( "result" ) ) showResult = true ; if ( mapping . getOption ( "steps" ) ) showSteps = true ; if ( mapping . getOption ( "keyword" ) ) showKeyword = true ; if ( mapping . getOption ( "when" ) ) showWhen = true ; if ( mapping . getOption ( "then" ) ) showThen = true ; if ( mapping . getOption ( "usage" ) ) showUsage = true ; |
public class PackagePrefixes { /** * Return the prefix for a specified package . If a prefix was specified
* for the package , then that prefix is returned . Otherwise , a camel - cased
* prefix is created from the package name . */
public String getPrefix ( PackageElement packageElement ) { } } | if ( packageElement == null ) { return "" ; } String packageName = packageElement . getQualifiedName ( ) . toString ( ) ; String prefix = getPrefix ( packageName ) ; if ( prefix != null ) { return prefix ; } prefix = packageLookup . getObjectiveCName ( packageName ) ; if ( prefix == null ) { prefix = NameTable . camelCaseQualifiedName ( packageName ) ; } addPrefix ( packageName , prefix ) ; return prefix ; |
public class UIStateRegistry { /** * Register a state for the passed HC element , using the internal ID of the
* element .
* @ param aNewElement
* The element to be added to the registry . May not be
* < code > null < / code > .
* @ return Never < code > null < / code > . */
@ Nonnull public EChange registerState ( @ Nonnull final IHCElement < ? > aNewElement ) { } } | ValueEnforcer . notNull ( aNewElement , "NewElement" ) ; if ( aNewElement . hasNoID ( ) ) LOGGER . warn ( "Registering the state for an object that has no ID - creating a new ID now!" ) ; return registerState ( aNewElement . ensureID ( ) . getID ( ) , aNewElement ) ; |
public class AmazonLightsailClient { /** * Attaches a static IP address to a specific Amazon Lightsail instance .
* @ param attachStaticIpRequest
* @ return Result of the AttachStaticIp operation returned by the service .
* @ throws ServiceException
* A general service exception .
* @ throws InvalidInputException
* Lightsail throws this exception when user input does not conform to the validation rules of an input
* field . < / p > < note >
* Domain - related APIs are only available in the N . Virginia ( us - east - 1 ) Region . Please set your AWS Region
* configuration to us - east - 1 to create , view , or edit these resources .
* @ throws NotFoundException
* Lightsail throws this exception when it cannot find a resource .
* @ throws OperationFailureException
* Lightsail throws this exception when an operation fails to execute .
* @ throws AccessDeniedException
* Lightsail throws this exception when the user cannot be authenticated or uses invalid credentials to
* access a resource .
* @ throws AccountSetupInProgressException
* Lightsail throws this exception when an account is still in the setup in progress state .
* @ throws UnauthenticatedException
* Lightsail throws this exception when the user has not been authenticated .
* @ sample AmazonLightsail . AttachStaticIp
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / lightsail - 2016-11-28 / AttachStaticIp " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public AttachStaticIpResult attachStaticIp ( AttachStaticIpRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeAttachStaticIp ( request ) ; |
public class CliCommandBuilder { /** * Adds a JVM argument to the command ignoring { @ code null } arguments .
* @ param jvmArg the JVM argument to add
* @ return the builder */
public CliCommandBuilder addJavaOption ( final String jvmArg ) { } } | if ( jvmArg != null && ! jvmArg . trim ( ) . isEmpty ( ) ) { javaOpts . add ( jvmArg ) ; } return this ; |
/**
 * Creates the temporary output file for the hadoop RecordWriter: builds a
 * task-attempt id for this parallel instance, configures the Hadoop task
 * properties (both pre- and post-2.2 key names), sets up the output committer
 * and finally obtains the RecordWriter.
 *
 * @param taskNumber The number of the parallel instance.
 * @param numTasks The number of parallel tasks.
 * @throws java.io.IOException if the task id is too large or the RecordWriter
 *             cannot be created
 */
@Override
public void open(int taskNumber, int numTasks) throws IOException {
    // enforce sequential open() calls
    synchronized (OPEN_MUTEX) {
        // Task ids are zero-padded to 6 digits below; larger ids cannot be encoded.
        if (Integer.toString(taskNumber + 1).length() > 6) {
            throw new IOException("Task id too large.");
        }

        this.taskNumber = taskNumber + 1;

        // for hadoop 2.2
        this.configuration.set("mapreduce.output.basename", "tmp");

        // Build an attempt id "attempt__0000_r_NNNNNN_0" where NNNNNN is the
        // 1-based task number left-padded with zeros to 6 digits.
        TaskAttemptID taskAttemptID = TaskAttemptID.forName("attempt__0000_r_"
                + String.format("%" + (6 - Integer.toString(taskNumber + 1).length()) + "s", " ").replace(" ", "0")
                + Integer.toString(taskNumber + 1)
                + "_0");

        this.configuration.set("mapred.task.id", taskAttemptID.toString());
        this.configuration.setInt("mapred.task.partition", taskNumber + 1);
        // for hadoop 2.2
        this.configuration.set("mapreduce.task.attempt.id", taskAttemptID.toString());
        this.configuration.setInt("mapreduce.task.partition", taskNumber + 1);

        try {
            this.context = new TaskAttemptContextImpl(this.configuration, taskAttemptID);
            this.outputCommitter = this.mapreduceOutputFormat.getOutputCommitter(this.context);
            this.outputCommitter.setupJob(new JobContextImpl(this.configuration, new JobID()));
        } catch (Exception e) {
            throw new RuntimeException(e);
        }

        // Propagate this job's credentials plus any from the current UGI user.
        this.context.getCredentials().addAll(this.credentials);
        Credentials currentUserCreds = getCredentialsFromUGI(UserGroupInformation.getCurrentUser());
        if (currentUserCreds != null) {
            this.context.getCredentials().addAll(currentUserCreds);
        }

        // compatible for hadoop 2.2.0, the temporary output directory is different from hadoop 1.2.1
        if (outputCommitter instanceof FileOutputCommitter) {
            this.configuration.set("mapreduce.task.output.dir",
                    ((FileOutputCommitter) this.outputCommitter).getWorkPath().toString());
        }

        try {
            this.recordWriter = this.mapreduceOutputFormat.getRecordWriter(this.context);
        } catch (InterruptedException e) {
            throw new IOException("Could not create RecordWriter.", e);
        }
    }
}
public class OwncloudUtils { /** * Convert a OwncloudResource to a OwncloudFileResource .
* @ param owncloudResource OwncloudResource
* @ return converted to OwncloudFileResource
* @ throws OwncloudNoFileResourceException if the OwncloudResource is not convertable to a OwncloudFileResource */
public static OwncloudFileResource toOwncloudFileResource ( OwncloudResource owncloudResource ) throws OwncloudNoFileResourceException { } } | if ( owncloudResource == null ) { return null ; } if ( isDirectory ( owncloudResource ) || ! ClassUtils . isAssignable ( owncloudResource . getClass ( ) , OwncloudFileResource . class ) ) { throw new OwncloudNoFileResourceException ( owncloudResource . getHref ( ) ) ; } return ( OwncloudFileResource ) owncloudResource ; |
public class DetectFiducialSquareImage { /** * Computes the hamming score between two descriptions . Larger the number better the fit */
protected int hamming ( short [ ] a , short [ ] b ) { } } | int distance = 0 ; for ( int i = 0 ; i < a . length ; i ++ ) { distance += DescriptorDistance . hamming ( ( a [ i ] & 0xFFFF ) ^ ( b [ i ] & 0xFFFF ) ) ; } return distance ; |
public class Operation { /** * < pre >
* The normal response of the operation in case of success . If the original
* method returns no data on success , such as ` Delete ` , the response is
* ` google . protobuf . Empty ` . If the original method is standard
* ` Get ` / ` Create ` / ` Update ` , the response should be the resource . For other
* methods , the response should have the type ` XxxResponse ` , where ` Xxx `
* is the original method name . For example , if the original method name
* is ` TakeSnapshot ( ) ` , the inferred response type is
* ` TakeSnapshotResponse ` .
* < / pre >
* < code > . google . protobuf . Any response = 5 ; < / code > */
public com . google . protobuf . AnyOrBuilder getResponseOrBuilder ( ) { } } | if ( resultCase_ == 5 ) { return ( com . google . protobuf . Any ) result_ ; } return com . google . protobuf . Any . getDefaultInstance ( ) ; |
public class IOUtil { /** * 创建输出流 ( 经过IO适配器创建 )
* @ param path
* @ return
* @ throws IOException */
public static OutputStream newOutputStream ( String path ) throws IOException { } } | if ( IOAdapter == null ) return new FileOutputStream ( path ) ; return IOAdapter . create ( path ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.