signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class BooleanCondition { /** * Condition on arbitrary input * @ param input the input to return * the condition for * @ return true if the condition is met * false otherwise */ @ Override public boolean condition ( Object input ) { } }
switch ( type ) { case AND : for ( Condition c : conditions ) { boolean thisCond = c . condition ( input ) ; if ( ! thisCond ) return false ; // Any false - > AND is false } return true ; case OR : for ( Condition c : conditions ) { boolean thisCond = c . condition ( input ) ; if ( thisCond ) return true ; // Any true - > OR is true } return false ; case NOT : return ! conditions [ 0 ] . condition ( input ) ; case XOR : return conditions [ 0 ] . condition ( input ) ^ conditions [ 1 ] . condition ( input ) ; default : throw new RuntimeException ( "Unknown condition type: " + type ) ; }
public class Template { /** * Checks if the given page uses a specific template . * @ param page AEM page * @ param templates Template ( s ) * @ return true if the page uses the template */ public static boolean is ( @ NotNull Page page , @ NotNull TemplatePathInfo @ NotNull . . . templates ) { } }
if ( page == null || templates == null || templates . length == 0 ) { return false ; } String templatePath = page . getProperties ( ) . get ( NameConstants . PN_TEMPLATE , String . class ) ; for ( TemplatePathInfo template : templates ) { if ( template . getTemplatePath ( ) . equals ( templatePath ) ) { return true ; } } return false ;
public class StreamingFileSink { /** * Creates the builder for a { @ code StreamingFileSink } with row - encoding format . * @ param basePath the base path where all the buckets are going to be created as sub - directories . * @ param encoder the { @ link Encoder } to be used when writing elements in the buckets . * @ param < IN > the type of incoming elements * @ return The builder where the remaining of the configuration parameters for the sink can be configured . * In order to instantiate the sink , call { @ link RowFormatBuilder # build ( ) } after specifying the desired parameters . */ public static < IN > StreamingFileSink . RowFormatBuilder < IN , String > forRowFormat ( final Path basePath , final Encoder < IN > encoder ) { } }
return new StreamingFileSink . RowFormatBuilder < > ( basePath , encoder , new DateTimeBucketAssigner < > ( ) ) ;
public class HttpUtils { /** * Compress a byte array using GZIP with the given compression level . */ static byte [ ] gzip ( byte [ ] data , int level ) throws IOException { } }
ByteArrayOutputStream baos = new ByteArrayOutputStream ( data . length ) ; try ( GzipLevelOutputStream out = new GzipLevelOutputStream ( baos ) ) { out . setLevel ( level ) ; out . write ( data ) ; } return baos . toByteArray ( ) ;
public class ConsumerUtil { /** * Creates a Key object from the certificate stored in the trust store and alias * provided . */ Key getPublicKey ( String trustedAlias , String trustStoreRef , String signatureAlgorithm ) throws KeyStoreServiceException , KeyException { } }
Key signingKey = getPublicKeyFromKeystore ( trustedAlias , trustStoreRef , signatureAlgorithm ) ; if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Trusted alias: " + trustedAlias + ", Truststore: " + trustStoreRef ) ; Tr . debug ( tc , "RSAPublicKey: " + ( signingKey instanceof RSAPublicKey ) ) ; } if ( signingKey != null && ! ( signingKey instanceof RSAPublicKey ) ) { signingKey = null ; } return signingKey ;
public class StandaloneAetherArtifactFileResolver { /** * Retrieves a new instance of a standalone { @ link org . eclipse . aether . RepositorySystemSession } , which { @ link org . eclipse . aether . repository . LocalRepositoryManager } points to ' target / local - repository ' dir . * @ param system * @ return */ private static RepositorySystemSession newRepositorySystemSession ( RepositorySystem system , File localRepositoryBaseDir ) { } }
final DefaultRepositorySystemSession session = MavenRepositorySystemUtils . newSession ( ) ; final LocalRepositoryManager localRepositoryManager = system . newLocalRepositoryManager ( session , new LocalRepository ( localRepositoryBaseDir ) ) ; session . setLocalRepositoryManager ( localRepositoryManager ) ; return session ;
public class JobInitializationPoller { /** * Method which is used by the poller to assign appropriate worker thread * to a queue . The number of threads would be always less than or equal * to number of queues in a system . If number of threads is configured to * be more than number of queues then poller does not create threads more * than number of queues . */ private void assignThreadsToQueues ( ) { } }
int countOfQueues = jobQueues . size ( ) ; String [ ] queues = ( String [ ] ) jobQueues . keySet ( ) . toArray ( new String [ countOfQueues ] ) ; int numberOfQueuesPerThread = countOfQueues / poolSize ; int numberOfQueuesAssigned = 0 ; for ( int i = 0 ; i < poolSize ; i ++ ) { JobInitializationThread initializer = createJobInitializationThread ( ) ; int batch = ( i * numberOfQueuesPerThread ) ; for ( int j = batch ; j < ( batch + numberOfQueuesPerThread ) ; j ++ ) { initializer . addQueue ( queues [ j ] ) ; threadsToQueueMap . put ( queues [ j ] , initializer ) ; numberOfQueuesAssigned ++ ; } } if ( numberOfQueuesAssigned < countOfQueues ) { // Assign remaining queues in round robin fashion to other queues int startIndex = 0 ; for ( int i = numberOfQueuesAssigned ; i < countOfQueues ; i ++ ) { JobInitializationThread t = threadsToQueueMap . get ( queues [ startIndex ] ) ; t . addQueue ( queues [ i ] ) ; threadsToQueueMap . put ( queues [ i ] , t ) ; startIndex ++ ; } }
public class ConfigHolder { /** * All the valid config options are enums in this package that implement { @ link ConfigSetting } . */ @ Override protected boolean isExpectedType ( @ SuppressWarnings ( "rawtypes" ) Class < ? extends ConfigurationSetting > c ) { } }
return c . isEnum ( ) && ConfigSetting . class . isAssignableFrom ( c ) && c . getPackage ( ) == ConfigSetting . class . getPackage ( ) ;
public class DAFileWriter { /** * Appends import statements for the specified { @ link DAImport } , only removing duplicates and imports of " java . lang " * and sorting them ( using { @ link String } ' s comparable implementation on { @ link DAImport # getQualifiedName ( ) } ) for * reproductive behavior . * @ param mapperImports a { @ link Collections } of { @ link DAImport } * @ return the current { @ link DAFileWriter } * @ throws IOException if an { @ link IOException } occurs writing the imports */ public DAFileWriter appendImports ( @ Nonnull Collection < DAImport > mapperImports ) throws IOException { } }
checkNotNull ( mapperImports , "Collection of imports can not be null" ) ; if ( mapperImports . isEmpty ( ) ) { return this ; } List < DAName > imports = removeDuplicatesFilterJavaLangAndSortImports ( mapperImports ) ; if ( imports . isEmpty ( ) ) { this . importQualifiedNames = Collections . emptySet ( ) ; this . importSimpleNames = Collections . emptySet ( ) ; return this ; } this . importQualifiedNames = from ( mapperImports ) . transform ( DAImportFunctions . toQualifiedName ( ) ) . filter ( notNull ( ) ) . toSet ( ) ; this . importSimpleNames = from ( mapperImports ) . transform ( DAImportFunctions . toSimpleName ( ) ) . filter ( notNull ( ) ) . toSet ( ) ; for ( DAName name : imports ) { writer . append ( "import " ) . append ( name ) . append ( ";" ) ; writer . newLine ( ) ; } writer . newLine ( ) ; return this ;
public class QueryImpl { /** * return a string list of all columns * @ return string list */ public String getColumnlist ( boolean upperCase ) { } }
StringBuilder sb = new StringBuilder ( ) ; for ( int i = 0 ; i < columnNames . length ; i ++ ) { if ( i > 0 ) sb . append ( ',' ) ; sb . append ( upperCase ? columnNames [ i ] . getUpperString ( ) : columnNames [ i ] . getString ( ) ) ; } return sb . toString ( ) ;
public class ProxyFactory { /** * Method to create a new proxy that wraps the bean instance . * @ param beanInstance the bean instance * @ return a new proxy object */ public T create ( BeanInstance beanInstance ) { } }
final T proxy = ( System . getSecurityManager ( ) == null ) ? run ( ) : AccessController . doPrivileged ( this ) ; ( ( ProxyObject ) proxy ) . weld_setHandler ( new ProxyMethodHandler ( contextId , beanInstance , bean ) ) ; return proxy ;
public class GLContext { /** * Makes the supplied shader the current shader , flushing any previous shader . */ public boolean useShader ( GLShader shader ) { } }
if ( curShader == shader ) return false ; checkGLError ( "useShader" ) ; flush ( true ) ; curShader = shader ; return true ;
public class EnumConstantBuilder { /** * Build the enum constant documentation . * @ param node the XML element that specifies which components to document * @ param memberDetailsTree the content tree to which the documentation will be added * @ throws DocletException is there is a problem while building the documentation */ public void buildEnumConstant ( XMLNode node , Content memberDetailsTree ) throws DocletException { } }
if ( writer == null ) { return ; } if ( hasMembersToDocument ( ) ) { Content enumConstantsDetailsTree = writer . getEnumConstantsDetailsTreeHeader ( typeElement , memberDetailsTree ) ; Element lastElement = enumConstants . get ( enumConstants . size ( ) - 1 ) ; for ( Element enumConstant : enumConstants ) { currentElement = ( VariableElement ) enumConstant ; Content enumConstantsTree = writer . getEnumConstantsTreeHeader ( currentElement , enumConstantsDetailsTree ) ; buildChildren ( node , enumConstantsTree ) ; enumConstantsDetailsTree . addContent ( writer . getEnumConstants ( enumConstantsTree , currentElement == lastElement ) ) ; } memberDetailsTree . addContent ( writer . getEnumConstantsDetails ( enumConstantsDetailsTree ) ) ; }
public class DescribeDocumentRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DescribeDocumentRequest describeDocumentRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( describeDocumentRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describeDocumentRequest . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( describeDocumentRequest . getDocumentVersion ( ) , DOCUMENTVERSION_BINDING ) ; protocolMarshaller . marshall ( describeDocumentRequest . getVersionName ( ) , VERSIONNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class RequestHttp1 { /** * Returns the header value for the key , returned as a CharSegment . */ @ Override public CharSegment getHeaderBuffer ( String key ) { } }
int i = matchNextHeader ( 0 , key ) ; if ( i >= 0 ) { return _headerValues [ i ] ; } else { return null ; }
public class CmsFileExplorer { /** * Handles the file tree click events . < p > * @ param event the event */ void handleFileTreeClick ( ItemClickEvent event ) { } }
Item resourceItem = m_treeContainer . getItem ( event . getItemId ( ) ) ; if ( ( resourceItem . getItemProperty ( CmsResourceTableProperty . PROPERTY_DISABLED ) . getValue ( ) == null ) || ! ( ( Boolean ) resourceItem . getItemProperty ( CmsResourceTableProperty . PROPERTY_DISABLED ) . getValue ( ) ) . booleanValue ( ) ) { // don ' t handle disabled item clicks try { readFolder ( ( CmsUUID ) event . getItemId ( ) ) ; } catch ( CmsException e ) { CmsErrorDialog . showErrorDialog ( CmsVaadinUtils . getMessageText ( Messages . ERR_EXPLORER_CAN_NOT_READ_RESOURCE_1 , event . getItemId ( ) ) , e ) ; LOG . error ( e . getLocalizedMessage ( ) , e ) ; } }
public class LBiObjFltConsumerBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */ @ Nonnull public static < T1 , T2 > LBiObjFltConsumer < T1 , T2 > biObjFltConsumerFrom ( Consumer < LBiObjFltConsumerBuilder < T1 , T2 > > buildingFunction ) { } }
LBiObjFltConsumerBuilder builder = new LBiObjFltConsumerBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ;
public class Animation { /** * Draw the animation at a specific location * @ param x The x position to draw the animation at * @ param y The y position to draw the animation at * @ param filter The filter to apply */ @ Override public void draw ( float x , float y , Color filter ) { } }
draw ( x , y , getWidth ( ) , getHeight ( ) , filter ) ;
public class DescribeMaintenanceWindowScheduleRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DescribeMaintenanceWindowScheduleRequest describeMaintenanceWindowScheduleRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( describeMaintenanceWindowScheduleRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describeMaintenanceWindowScheduleRequest . getWindowId ( ) , WINDOWID_BINDING ) ; protocolMarshaller . marshall ( describeMaintenanceWindowScheduleRequest . getTargets ( ) , TARGETS_BINDING ) ; protocolMarshaller . marshall ( describeMaintenanceWindowScheduleRequest . getResourceType ( ) , RESOURCETYPE_BINDING ) ; protocolMarshaller . marshall ( describeMaintenanceWindowScheduleRequest . getFilters ( ) , FILTERS_BINDING ) ; protocolMarshaller . marshall ( describeMaintenanceWindowScheduleRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; protocolMarshaller . marshall ( describeMaintenanceWindowScheduleRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class RingIterator { /** * Returns a stream that iterates all NavigableMap entries ascending starting from key and * ending entry before key . * @ param < K > * @ param < V > * @ param key * @ param map * @ param parallel * @ return */ public static final < K , V > Stream < Entry < K , V > > stream ( K key , NavigableMap < K , V > map , boolean parallel ) { } }
return StreamSupport . stream ( spliterator ( key , map ) , parallel ) ;
public class UniqueId { /** * Attempts to run the PutRequest given in argument , retrying if needed . * Puts are synchronized . * @ param put The PutRequest to execute . * @ param attempts The maximum number of attempts . * @ param wait The initial amount of time in ms to sleep for after a * failure . This amount is doubled after each failed attempt . * @ throws HBaseException if all the attempts have failed . This exception * will be the exception of the last attempt . */ private void hbasePutWithRetry ( final PutRequest put , short attempts , short wait ) throws HBaseException { } }
put . setBufferable ( false ) ; // TODO ( tsuna ) : Remove once this code is async . while ( attempts -- > 0 ) { try { client . put ( put ) . joinUninterruptibly ( ) ; return ; } catch ( HBaseException e ) { if ( attempts > 0 ) { LOG . error ( "Put failed, attempts left=" + attempts + " (retrying in " + wait + " ms), put=" + put , e ) ; try { Thread . sleep ( wait ) ; } catch ( InterruptedException ie ) { throw new RuntimeException ( "interrupted" , ie ) ; } wait *= 2 ; } else { throw e ; } } catch ( Exception e ) { LOG . error ( "WTF? Unexpected exception type, put=" + put , e ) ; } } throw new IllegalStateException ( "This code should never be reached!" ) ;
public class ScheduledInstancesLaunchSpecification { /** * The IDs of the security groups . * @ param securityGroupIds * The IDs of the security groups . */ public void setSecurityGroupIds ( java . util . Collection < String > securityGroupIds ) { } }
if ( securityGroupIds == null ) { this . securityGroupIds = null ; return ; } this . securityGroupIds = new com . amazonaws . internal . SdkInternalList < String > ( securityGroupIds ) ;
public class JLanguageTool {

    /**
     * Tokenizes the given {@code sentence} into words and analyzes it.
     * This is the same as {@link #getAnalyzedSentence(String)} but it does not run
     * the disambiguator.
     *
     * @param sentence sentence to be analyzed
     * @since 0.9.8
     */
    public AnalyzedSentence getRawAnalyzedSentence(String sentence) throws IOException {
        List<String> tokens = language.getWordTokenizer().tokenize(sentence);
        // Soft hyphens are stripped for tagging; remember where they were by index.
        Map<Integer, String> softHyphenTokens = replaceSoftHyphens(tokens);
        List<AnalyzedTokenReadings> aTokens = language.getTagger().tag(tokens);
        if (language.getChunker() != null) {
            language.getChunker().addChunkTags(aTokens);
        }
        int numTokens = aTokens.size();
        int posFix = 0;
        for (int i = 1; i < numTokens; i++) {
            aTokens.get(i).setWhitespaceBefore(aTokens.get(i - 1).isWhitespace());
            // Shift start positions by the accumulated soft-hyphen length difference.
            // NOTE(review): these positions appear to be overwritten by the
            // sequential recomputation in the loop below — confirm intent.
            aTokens.get(i).setStartPos(aTokens.get(i).getStartPos() + posFix);
            if (!softHyphenTokens.isEmpty() && softHyphenTokens.get(i) != null) {
                // Re-add the original (soft-hyphen-containing) form as an extra reading.
                aTokens.get(i).addReading(language.getTagger().createToken(softHyphenTokens.get(i), null));
                posFix += softHyphenTokens.get(i).length() - aTokens.get(i).getToken().length();
            }
        }
        // Result array holds one extra slot for the synthetic sentence-start token.
        AnalyzedTokenReadings[] tokenArray = new AnalyzedTokenReadings[tokens.size() + 1];
        AnalyzedToken[] startTokenArray = new AnalyzedToken[1];
        int toArrayCount = 0;
        AnalyzedToken sentenceStartToken = new AnalyzedToken("", SENTENCE_START_TAGNAME, null);
        startTokenArray[0] = sentenceStartToken;
        tokenArray[toArrayCount++] = new AnalyzedTokenReadings(startTokenArray, 0);
        int startPos = 0;
        for (AnalyzedTokenReadings posTag : aTokens) {
            posTag.setStartPos(startPos);
            tokenArray[toArrayCount++] = posTag;
            startPos += posTag.getToken().length();
        }
        // add additional tags
        int lastToken = toArrayCount - 1;
        // make SENT_END appear at last not whitespace token
        for (int i = 0; i < toArrayCount - 1; i++) {
            if (!tokenArray[lastToken - i].isWhitespace()) {
                lastToken -= i;
                break;
            }
        }
        tokenArray[lastToken].setSentEnd();
        // A trailing linebreak token also marks a paragraph end.
        if (tokenArray.length == lastToken + 1 && tokenArray[lastToken].isLinebreak()) {
            tokenArray[lastToken].setParagraphEnd();
        }
        return new AnalyzedSentence(tokenArray);
    }
}
public class AWSGreengrassClient { /** * Creates a subscription definition . You may provide the initial version of the subscription definition now or use * ' ' CreateSubscriptionDefinitionVersion ' ' at a later time . * @ param createSubscriptionDefinitionRequest * @ return Result of the CreateSubscriptionDefinition operation returned by the service . * @ throws BadRequestException * invalid request * @ sample AWSGreengrass . CreateSubscriptionDefinition * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / greengrass - 2017-06-07 / CreateSubscriptionDefinition " * target = " _ top " > AWS API Documentation < / a > */ @ Override public CreateSubscriptionDefinitionResult createSubscriptionDefinition ( CreateSubscriptionDefinitionRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeCreateSubscriptionDefinition ( request ) ;
public class EscapeTool { /** * URL Encodes the provided text . * @ param text the text to encode * @ return the URL encoded text */ public String url ( String text ) { } }
if ( text == null || text . isEmpty ( ) ) { return text ; } try { return URLEncoder . encode ( text , UTF_8 ) ; } catch ( UnsupportedEncodingException ex ) { LOGGER . warn ( "UTF-8 is not supported?" ) ; LOGGER . info ( "" , ex ) ; } return "" ;
public class StringWalker { /** * Applies the { @ link TextFormatting } as a new style . * @ param format the format */ protected void applyStyle ( TextFormatting format ) { } }
if ( format == TextFormatting . RESET ) { FontOptions fontOptions = styles . getFirst ( ) ; styles . clear ( ) ; styles . add ( fontOptions ) ; return ; } FontOptionsBuilder builder = currentStyle ( ) . toBuilder ( ) ; builder . styles ( format ) ; styles . add ( builder . build ( ) ) ;
public class EntityRecognizerAnnotationsMarshaller { /** * Marshall the given parameter object . */ public void marshall ( EntityRecognizerAnnotations entityRecognizerAnnotations , ProtocolMarshaller protocolMarshaller ) { } }
if ( entityRecognizerAnnotations == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( entityRecognizerAnnotations . getS3Uri ( ) , S3URI_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class DeploymentsInner { /** * Exports the template used for specified deployment . * @ param resourceGroupName The name of the resource group . The name is case insensitive . * @ param deploymentName The name of the deployment from which to get the template . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < DeploymentExportResultInner > exportTemplateAsync ( String resourceGroupName , String deploymentName , final ServiceCallback < DeploymentExportResultInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( exportTemplateWithServiceResponseAsync ( resourceGroupName , deploymentName ) , serviceCallback ) ;
public class LaActionExecuteUtil { public static String buildSimpleMethodExp ( Method executeMethod ) { } }
final StringBuilder sb = new StringBuilder ( ) ; doBuildSimpleMethodNameDefExp ( sb , executeMethod ) ; doBuildSimpleMethodParameterExp ( sb , executeMethod ) ; return sb . toString ( ) ;
public class GenerateDataKeyRequest { /** * A set of key - value pairs that represents additional authenticated data . * For more information , see < a * href = " http : / / docs . aws . amazon . com / kms / latest / developerguide / encryption - context . html " > Encryption Context < / a > in the * < i > AWS Key Management Service Developer Guide < / i > . * @ param encryptionContext * A set of key - value pairs that represents additional authenticated data . < / p > * For more information , see < a * href = " http : / / docs . aws . amazon . com / kms / latest / developerguide / encryption - context . html " > Encryption Context < / a > * in the < i > AWS Key Management Service Developer Guide < / i > . * @ return Returns a reference to this object so that method calls can be chained together . */ public GenerateDataKeyRequest withEncryptionContext ( java . util . Map < String , String > encryptionContext ) { } }
setEncryptionContext ( encryptionContext ) ; return this ;
public class ComplexPixel { /** * Multiplies a complex number to this pixel . This performs a complex multiplication * and stores the result in this pixel . * @ param r real part to multiply * @ param i imaginary part to multiply * @ return this */ public ComplexPixel mult ( double r , double i ) { } }
double thisr = real ( ) , thisi = imag ( ) ; return setComplex ( thisr * r - thisi * i , thisr * i + thisi * r ) ;
public class LocalResponseAnalysisEngine { /** * This method simply logs or executes responses . * @ param response { @ link Response } that has been added to the { @ link ResponseStore } . */ @ Override public void analyze ( Response response ) { } }
if ( response == null ) { return ; } if ( ResponseHandler . LOG . equals ( response . getAction ( ) ) ) { logger . info ( "Handling <log> response for user <{}>" , response . getUser ( ) . getUsername ( ) ) ; } else { logger . info ( "Delegating response for user <{}> to configured response handler <{}>" , response . getUser ( ) . getUsername ( ) , responseHandler . getClass ( ) . getName ( ) ) ; responseHandler . handle ( response ) ; }
public class ExcelUtils { /** * sheet生成 */ private void generateSheet ( SheetTemplate template , int sheetIndex , Map < String , List < ? > > data , Map < String , String > extendMap , Class clazz , boolean isWriteHeader ) throws Excel4JException { } }
SheetTemplateHandler . loadTemplate ( template , sheetIndex ) ; SheetTemplateHandler . extendData ( template , extendMap ) ; List < ExcelHeader > headers = Utils . getHeaderList ( clazz ) ; if ( isWriteHeader ) { // 写标题 SheetTemplateHandler . createNewRow ( template ) ; for ( ExcelHeader header : headers ) { SheetTemplateHandler . createCell ( template , header . getTitle ( ) , null ) ; } } for ( Map . Entry < String , List < ? > > entry : data . entrySet ( ) ) { for ( Object object : entry . getValue ( ) ) { SheetTemplateHandler . createNewRow ( template ) ; SheetTemplateHandler . insertSerial ( template , entry . getKey ( ) ) ; for ( ExcelHeader header : headers ) { SheetTemplateHandler . createCell ( template , Utils . getProperty ( object , header . getFiled ( ) , header . getWriteConverter ( ) ) , entry . getKey ( ) ) ; } } }
public class CPFriendlyURLEntryPersistenceImpl { /** * Caches the cp friendly url entry in the entity cache if it is enabled . * @ param cpFriendlyURLEntry the cp friendly url entry */ @ Override public void cacheResult ( CPFriendlyURLEntry cpFriendlyURLEntry ) { } }
entityCache . putResult ( CPFriendlyURLEntryModelImpl . ENTITY_CACHE_ENABLED , CPFriendlyURLEntryImpl . class , cpFriendlyURLEntry . getPrimaryKey ( ) , cpFriendlyURLEntry ) ; finderCache . putResult ( FINDER_PATH_FETCH_BY_UUID_G , new Object [ ] { cpFriendlyURLEntry . getUuid ( ) , cpFriendlyURLEntry . getGroupId ( ) } , cpFriendlyURLEntry ) ; finderCache . putResult ( FINDER_PATH_FETCH_BY_G_C_L_U , new Object [ ] { cpFriendlyURLEntry . getGroupId ( ) , cpFriendlyURLEntry . getClassNameId ( ) , cpFriendlyURLEntry . getLanguageId ( ) , cpFriendlyURLEntry . getUrlTitle ( ) } , cpFriendlyURLEntry ) ; finderCache . putResult ( FINDER_PATH_FETCH_BY_G_C_C_L_U , new Object [ ] { cpFriendlyURLEntry . getGroupId ( ) , cpFriendlyURLEntry . getClassNameId ( ) , cpFriendlyURLEntry . getClassPK ( ) , cpFriendlyURLEntry . getLanguageId ( ) , cpFriendlyURLEntry . getUrlTitle ( ) } , cpFriendlyURLEntry ) ; finderCache . putResult ( FINDER_PATH_FETCH_BY_G_C_C_L_M , new Object [ ] { cpFriendlyURLEntry . getGroupId ( ) , cpFriendlyURLEntry . getClassNameId ( ) , cpFriendlyURLEntry . getClassPK ( ) , cpFriendlyURLEntry . getLanguageId ( ) , cpFriendlyURLEntry . isMain ( ) } , cpFriendlyURLEntry ) ; cpFriendlyURLEntry . resetOriginalValues ( ) ;
public class SetIterables { /** * Returns an Immutable version of powerset where the inner sets are also immutable . */ public static < T > ImmutableSet < ImmutableSet < T > > immutablePowerSet ( Set < T > set ) { } }
return powerSet ( set ) . collect ( new Function < MutableSet < T > , ImmutableSet < T > > ( ) { public ImmutableSet < T > valueOf ( MutableSet < T > set ) { return set . toImmutable ( ) ; } } ) . toImmutable ( ) ;
public class ForwardCurveFromDiscountCurve { /** * / * ( non - Javadoc ) * @ see net . finmath . marketdata . ForwardCurveInterface # getForward ( double ) */ @ Override public RandomVariable getForward ( AnalyticModel model , double fixingTime , double paymentOffset ) { } }
if ( model == null ) { throw new IllegalArgumentException ( this . getName ( ) + ": model==null" ) ; } DiscountCurveInterface referenceDiscountCurveForForwards = model . getDiscountCurve ( referenceDiscountCurveForForwardsName ) ; // do not use discountCurveName here ( usually this is an OIS curve ) if ( referenceDiscountCurveForForwards == null ) { throw new IllegalArgumentException ( this . getName ( ) + ": referenceDiscountCurveForForwards " + referenceDiscountCurveForForwardsName + " not found in the model:\n" + model . toString ( ) ) ; } if ( Double . isNaN ( paymentOffset ) || paymentOffset <= 0.0 ) { throw new IllegalArgumentException ( this . getName ( ) + ": Requesting forward with paymentOffset " + paymentOffset + " not allowed." ) ; } double daycount = paymentOffset * daycountScaling ; return referenceDiscountCurveForForwards . getDiscountFactor ( model , fixingTime + periodOffset ) . div ( referenceDiscountCurveForForwards . getDiscountFactor ( model , fixingTime + paymentOffset + periodOffset ) ) . sub ( 1.0 ) . div ( daycount ) ;
public class Node { /** * Returns the set of ES5 directives for this node . */ @ SuppressWarnings ( "unchecked" ) @ Nullable public final Set < String > getDirectives ( ) { } }
return ( Set < String > ) getProp ( Prop . DIRECTIVES ) ;
public class WorkingMemoryLogger { /** * Creates a string representation of the declarations of an activation . * This is a list of name - value - pairs for each of the declarations in the * tuple of the activation . The name is the identifier ( = name ) of the * declaration , and the value is a toString of the value of the * parameter , followed by the id of the fact between parentheses . * @ param match The match from which the declarations should be extracted * @ return A String represetation of the declarations of the activation . */ private String extractDeclarations ( Match match ) { } }
final StringBuilder result = new StringBuilder ( ) ; List < String > declarations = match . getDeclarationIds ( ) ; Map < String , Declaration > declsMap = ( ( AgendaItem ) match ) . getTerminalNode ( ) . getSubRule ( ) . getOuterDeclarations ( ) ; for ( int i = 0 ; i < declarations . size ( ) ; i ++ ) { String declaration = declarations . get ( i ) ; Declaration decl = declsMap . get ( declaration ) ; InternalFactHandle handle = ( ( Tuple ) match ) . get ( decl ) ; if ( ! handle . isValid ( ) ) { continue ; } Object value = decl . getValue ( null , handle . getObject ( ) ) ; result . append ( declaration ) ; result . append ( "=" ) ; if ( value == null ) { // this should never occur result . append ( "null" ) ; } else { result . append ( value ) ; } if ( i < declarations . size ( ) - 1 ) { result . append ( "; " ) ; } } return result . toString ( ) ;
public class StAXUtils { /** * Extract or create an instance of { @ link XMLStreamWriter } from the provided { @ link Result } . * @ param factory the { @ link XMLOutputFactory } to use ( if needed ) * @ param result the result * @ return the { @ link XMLStreamWriter } * @ throws XMLStreamException when failing to extract xml stream writer * @ since 9.5.2 * @ since 9.6RC1 */ public static XMLStreamWriter getXMLStreamWriter ( XMLOutputFactory factory , Result result ) throws XMLStreamException { } }
XMLStreamWriter xmlStreamWriter ; if ( result instanceof SAXResult ) { // SAXResult is not supported by the standard XMLOutputFactory xmlStreamWriter = new XMLEventStreamWriter ( new SAXEventWriter ( ( ( SAXResult ) result ) . getHandler ( ) ) , XML_EVENT_FACTORY ) ; } else if ( result instanceof StAXResult ) { // XMLEventWriter is not supported as result of XMLOutputFactory # createXMLStreamWriter StAXResult staxResult = ( StAXResult ) result ; if ( staxResult . getXMLStreamWriter ( ) != null ) { xmlStreamWriter = staxResult . getXMLStreamWriter ( ) ; } else { xmlStreamWriter = new XMLEventStreamWriter ( staxResult . getXMLEventWriter ( ) , XML_EVENT_FACTORY ) ; } } else { xmlStreamWriter = factory . createXMLStreamWriter ( result ) ; } return xmlStreamWriter ;
public class Credential { /** * Sets the expected expiration time in milliseconds relative to the * { @ link System # currentTimeMillis ( ) Java epoch } , or { @ code null } for none . * Overriding is only supported for the purpose of calling the super implementation and changing * the return type , but nothing else . */ public Credential setExpirationTimeMilliseconds ( Long expirationTimeMilliseconds ) { } }
lock . lock ( ) ; try { this . expirationTimeMilliseconds = expirationTimeMilliseconds ; } finally { lock . unlock ( ) ; } return this ;
public class AbstractHessianResolver { /** * Looks up a proxy object . */ public Object lookup ( String type , String url ) throws IOException { } }
return new HessianRemote ( type , url ) ;
public class CmsLock {
    /**
     * Performs the lock state operation on a single resource.<p>
     *
     * @param resourceName the resource name to perform the operation on
     * @param dialogAction the lock action: lock, unlock or change lock
     * @throws CmsException if the operation fails
     */
    protected void performSingleResourceOperation(String resourceName, int dialogAction) throws CmsException {
        // store original name to use for lock action (lockResource is called with the
        // name exactly as passed in, while changeLock/unlockResource use the possibly
        // slash-terminated folder name)
        String originalResourceName = resourceName;
        CmsResource res = getCms().readResource(resourceName, CmsResourceFilter.ALL);
        if (res.isFolder() && !resourceName.endsWith("/")) {
            // folders must be addressed with a trailing slash
            resourceName += "/";
        }
        org.opencms.lock.CmsLock lock = getCms().getLock(res);
        // perform action depending on dialog uri
        switch (dialogAction) {
            case TYPE_LOCKCHANGE:
            case TYPE_LOCK:
                if (lock.isNullLock()) {
                    // nobody holds a lock yet: acquire one
                    getCms().lockResource(originalResourceName);
                } else if (!lock.isDirectlyOwnedInProjectBy(getCms())) {
                    // somebody else holds the lock: steal it
                    getCms().changeLock(resourceName);
                }
                break;
            case TYPE_UNLOCK:
            default:
                if (lock.isNullLock()) {
                    // nothing to unlock
                    break;
                }
                if (lock.isOwnedBy(getCms().getRequestContext().getCurrentUser())) {
                    // only the lock owner may unlock
                    getCms().unlockResource(resourceName);
                }
        }
    }
}
public class TokenCachingStrategy {
    /**
     * Gets the cached token value from a Bundle.
     *
     * @param bundle a Bundle in which the token value was stored
     * @return the cached token value, or null if none was stored under {@code TOKEN_KEY}
     * @throws NullPointerException if the passed in Bundle is null
     */
    public static String getToken(Bundle bundle) {
        // fail fast with a descriptive NPE rather than inside Bundle.getString
        Validate.notNull(bundle, "bundle");
        return bundle.getString(TOKEN_KEY);
    }
}
public class AbstractExtendedSet { /** * { @ inheritDoc } */ @ Override public boolean containsAtLeast ( Collection < ? extends T > other , int minElements ) { } }
if ( minElements < 1 ) throw new IllegalArgumentException ( ) ; return intersectionSize ( other ) >= minElements ;
public class HintRule {
    /**
     * Adds a given vendor to the list of evidence to remove when matched.
     *
     * @param source the source of the evidence
     * @param name the name of the evidence
     * @param value the value of the evidence
     * @param regex whether value is a regex
     * @param confidence the confidence of the evidence
     */
    public void addRemoveVendor(String source, String name, String value, boolean regex, Confidence confidence) {
        // wrap the fields in a matcher; actual matching happens later when the rule is applied
        removeVendor.add(new EvidenceMatcher(source, name, value, regex, confidence));
    }
}
public class JFapByteBuffer {
    /**
     * Puts a single byte into the byte buffer.
     *
     * <p>Synchronized so concurrent writers cannot interleave with the underlying
     * buffer cursor; fails via {@code checkValid()} if the buffer has been released.
     *
     * @param item the byte to append
     */
    public synchronized void put(byte item) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "put", Byte.valueOf(item));
        // reject writes against a buffer that is no longer usable
        checkValid();
        // request a buffer with at least 1 byte of room, then write into it
        getCurrentByteBuffer(1).put(item);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "put");
    }
}
public class ServiceFamiliesDIB { /** * / * ( non - Javadoc ) * @ see tuwien . auto . calimero . knxnetip . util . DIB # toByteArray ( ) */ @ Override public byte [ ] toByteArray ( ) { } }
final byte [ ] buf = super . toByteArray ( ) ; int k = 2 ; for ( int i = 0 ; i < ids . length ; i ++ ) { buf [ k ++ ] = ( byte ) ids [ i ] ; buf [ k ++ ] = ( byte ) versions [ i ] ; } return buf ;
public class FluoConfiguration { /** * Removes any configured observers . * @ deprecated since 1.1.0 . Replaced by { @ link # setObserverProvider ( String ) } and * { @ link # getObserverProvider ( ) } */ @ Deprecated public FluoConfiguration clearObservers ( ) { } }
Iterator < String > iter1 = getKeys ( OBSERVER_PREFIX . substring ( 0 , OBSERVER_PREFIX . length ( ) - 1 ) ) ; while ( iter1 . hasNext ( ) ) { String key = iter1 . next ( ) ; clearProperty ( key ) ; } return this ;
public class Flowable {
    /**
     * Returns a Single that emits a single HashMap containing all items emitted by the finite
     * source Publisher, mapped by the keys returned by the specified {@code keySelector} function.
     *
     * <p>If more than one source item maps to the same key, the HashMap will contain the latest of
     * those items. The upstream must signal {@code onComplete} for the accumulated map to be
     * emitted; an infinite source will never emit and may lead to {@code OutOfMemoryError}.
     *
     * <dl>
     * <dt><b>Backpressure:</b></dt>
     * <dd>Honors backpressure from downstream and consumes the source in an unbounded manner.</dd>
     * <dt><b>Scheduler:</b></dt>
     * <dd>{@code toMap} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     *
     * @param <K> the key type of the Map
     * @param keySelector the function that extracts the key from a source item; must not be null
     * @return a Single that emits a single HashMap of the mapped items
     * @see <a href="http://reactivex.io/documentation/operators/to.html">ReactiveX operators documentation: To</a>
     */
    @CheckReturnValue
    @BackpressureSupport(BackpressureKind.UNBOUNDED_IN)
    @SchedulerSupport(SchedulerSupport.NONE)
    public final <K> Single<Map<K, T>> toMap(final Function<? super T, ? extends K> keySelector) {
        ObjectHelper.requireNonNull(keySelector, "keySelector is null");
        // delegate to collect(): a fresh HashMap as the accumulator, keySelector-driven puts
        return collect(HashMapSupplier.<K, T>asCallable(), Functions.toMapKeySelector(keySelector));
    }
}
public class JavaCompilerUtil {
    /**
     * Compiles the class. className is a fully qualified Java class, e.g. work.jsp.Test.
     *
     * @param fileName Java source name -- in VFS format
     * @param lineMap mapping from generated class back to the source class
     * @param ifModified compile only if the *.java is modified
     * @throws IOException if the source cannot be read or the class file cannot be removed
     * @throws ClassNotFoundException if the parent make step fails to resolve a class
     */
    public void compile(String fileName, LineMap lineMap, boolean ifModified) throws IOException, ClassNotFoundException {
        if (_compileParent) {
            // ensure the parent loader's artifacts are up to date before compiling this file
            try {
                if (_loader instanceof Make) {
                    ((Make) _loader).make();
                }
            } catch (RuntimeException e) {
                throw e;
            } catch (ClassNotFoundException e) {
                throw e;
            } catch (IOException e) {
                throw e;
            } catch (Exception e) {
                // anything else is wrapped so the declared throws clause stays narrow
                throw ConfigException.wrap(e);
            }
        }
        // derive source and class paths from the extension-less base name
        int p = fileName.lastIndexOf('.');
        String path = fileName.substring(0, p);
        String javaName = path + _sourceExt;
        PathImpl javaPath = getSourceDir().lookup(javaName);
        String className = path + ".class";
        PathImpl classPath = getClassDir().lookup(className);
        synchronized (LOCK) {
            // skip recompilation when the class file is already at least as new as the source
            if (ifModified && javaPath.getLastModified() <= classPath.getLastModified())
                return;
            // remove the stale class file so a failed compile cannot leave an outdated one behind
            if (javaPath.canRead() && classPath.exists())
                classPath.remove();
        }
        _compilerService.compile(this, new String[] { fileName }, lineMap);
        // XXX: This is needed for some regressions to pass,
        // basically the timing wouldn't work if the classpath time
        // was selected by the compiler
        // server/141d, server/10k0
        // classPath.setLastModified(javaPath.getLastModified());
    }
}
public class CmsSiteManagerImpl {
    /**
     * Updates the general settings.<p>
     *
     * @param cms the cms to use
     * @param defaulrUri the default URI (note: parameter name carries a historical typo,
     *        kept for source compatibility)
     * @param workplaceServers the workplace server URLs mapped to their SSL modes
     * @param sharedFolder the shared folder URI; must be an absolute path starting and ending
     *        with '/', must not be "/" itself, and must not live below "/sites/"
     * @throws CmsException if the shared folder path is invalid or a resource lookup fails
     */
    public void updateGeneralSettings(CmsObject cms, String defaulrUri, Map<String, CmsSSLMode> workplaceServers, String sharedFolder) throws CmsException {
        // work on a clone rooted at "/" so site-relative paths resolve against the full VFS
        CmsObject clone = OpenCms.initCmsObject(cms);
        clone.getRequestContext().setSiteRoot("");
        // validate the shared folder before mutating any state
        if ((sharedFolder == null)
            || sharedFolder.equals("")
            || sharedFolder.equals("/")
            || !sharedFolder.startsWith("/")
            || !sharedFolder.endsWith("/")
            || sharedFolder.startsWith("/sites/")) {
            throw new CmsException(Messages.get().container(Messages.ERR_INVALID_PATH_FOR_SHARED_FOLDER_1, sharedFolder));
        }
        // temporarily unfreeze the configuration, apply the new settings, then re-freeze;
        // the order matters: initialize(cms) must run while m_frozen is false
        m_frozen = false;
        setDefaultUri(clone.readResource(defaulrUri).getRootPath());
        setSharedFolder(clone.readResource(sharedFolder).getRootPath());
        m_workplaceServers = workplaceServers;
        initialize(cms);
        m_frozen = true;
    }
}
public class JMX {
    /**
     * Static factory creating a JMX handle for the given endpoint and credentials.
     *
     * @param host the JMX server host
     * @param port the JMX server port
     * @param user the user name to authenticate with
     * @param password the password; passed as {@code char[]} so callers can clear it after use
     * @return a new {@link JMX} instance for the given connection parameters
     */
    public static JMX connect(String host, int port, String user, char[] password) {
        return new JMX(host, port, user, password);
    }
}
public class Key {
    /**
     * Builds a Key from a Class and a single annotation.
     *
     * @param type target class; must not be null
     * @param ann associated annotation; must not be null
     * @param <T> target type
     * @return instance of a Key carrying the type and the one-element annotation array
     * @throws NullPointerException if {@code type} or {@code ann} is null
     */
    public static <T> Key<T> of(Class<T> type, Annotation ann) {
        Objects.requireNonNull(type);
        Objects.requireNonNull(ann);
        return new Key<>(type, new Annotation[] { ann });
    }
}
public class AutoClassDiscovery {
    /**
     * Initializes the class cache.
     *
     * <p>Synchronized and idempotent: the cache is built only once ({@code m_Cache == null}
     * guard). If classpath scanning yields nothing, falls back to a fixed class list shipped
     * as a resource ({@code CLASS_LIST}).
     */
    protected static synchronized void initCache() {
        if (m_Cache == null) {
            m_Cache = new ClassCache();
            // failed to locate any classes on the classpath, maybe inside Weka?
            // try loading fixed list of classes
            if (m_Cache.isEmpty()) {
                InputStream inputStream = null;
                try {
                    inputStream = m_Cache.getClass().getResourceAsStream(CLASS_LIST);
                    // replace the empty cache with one built from the bundled list
                    m_Cache = new ClassCache(new FixedClassListTraversal(inputStream));
                } catch (Exception e) {
                    System.err.println("Failed to initialize class cache from fixed list (" + CLASS_LIST + ")!");
                    e.printStackTrace();
                } finally {
                    // always close the resource stream, even if cache construction failed
                    if (inputStream != null) {
                        try {
                            inputStream.close();
                        } catch (Exception e) {
                            // ignored: nothing useful can be done about a failed close here
                        }
                    }
                }
            }
        }
    }
}
public class HttpPostStandardRequestDecoder {
    /**
     * Fills the map and list with as many Attributes as possible from the body in
     * non-multipart (application/x-www-form-urlencoded) mode.
     *
     * <p>Fast path over the backing array of {@code undecodedChunk}; when the buffer has no
     * accessible array, delegates to {@link #parseBodyAttributesStandard()}. Runs a small
     * state machine: DISPOSITION = scanning for a key (up to '=' or '&amp;'),
     * FIELD = scanning for a value (up to '&amp;' or CRLF/LF). On any decoding error the
     * reader index is restored before rethrowing.
     *
     * @throws ErrorDataDecoderException if there is a problem with the charset decoding or
     *         other errors
     */
    private void parseBodyAttributes() {
        if (!undecodedChunk.hasArray()) {
            // no backing array: use the generic (slower) implementation
            parseBodyAttributesStandard();
            return;
        }
        SeekAheadOptimize sao = new SeekAheadOptimize(undecodedChunk);
        int firstpos = undecodedChunk.readerIndex();
        int currentpos = firstpos;
        int equalpos;
        int ampersandpos;
        if (currentStatus == MultiPartStatus.NOTSTARTED) {
            currentStatus = MultiPartStatus.DISPOSITION;
        }
        boolean contRead = true;
        try {
            loop:
            while (sao.pos < sao.limit) {
                char read = (char) (sao.bytes[sao.pos++] & 0xFF);
                currentpos++;
                switch (currentStatus) {
                    case DISPOSITION:
                        // scanning for the key: ends at '=' (value follows) or '&' (empty value)
                        if (read == '=') {
                            currentStatus = MultiPartStatus.FIELD;
                            equalpos = currentpos - 1;
                            String key = decodeAttribute(undecodedChunk.toString(firstpos, equalpos - firstpos, charset), charset);
                            currentAttribute = factory.createAttribute(request, key);
                            firstpos = currentpos;
                        } else if (read == '&') {
                            // special empty FIELD: "key&" means key with empty value
                            currentStatus = MultiPartStatus.DISPOSITION;
                            ampersandpos = currentpos - 1;
                            String key = decodeAttribute(undecodedChunk.toString(firstpos, ampersandpos - firstpos, charset), charset);
                            currentAttribute = factory.createAttribute(request, key);
                            currentAttribute.setValue(""); // empty
                            addHttpData(currentAttribute);
                            currentAttribute = null;
                            firstpos = currentpos;
                            contRead = true;
                        }
                        break;
                    case FIELD:
                        // scanning for the value: ends at '&', CRLF, or bare LF
                        if (read == '&') {
                            currentStatus = MultiPartStatus.DISPOSITION;
                            ampersandpos = currentpos - 1;
                            setFinalBuffer(undecodedChunk.copy(firstpos, ampersandpos - firstpos));
                            firstpos = currentpos;
                            contRead = true;
                        } else if (read == HttpConstants.CR) {
                            if (sao.pos < sao.limit) {
                                read = (char) (sao.bytes[sao.pos++] & 0xFF);
                                currentpos++;
                                if (read == HttpConstants.LF) {
                                    // CRLF terminates the body section
                                    currentStatus = MultiPartStatus.PREEPILOGUE;
                                    ampersandpos = currentpos - 2;
                                    sao.setReadPosition(0);
                                    setFinalBuffer(undecodedChunk.copy(firstpos, ampersandpos - firstpos));
                                    firstpos = currentpos;
                                    contRead = false;
                                    break loop;
                                } else {
                                    // CR not followed by LF is malformed input
                                    sao.setReadPosition(0);
                                    throw new ErrorDataDecoderException("Bad end of line");
                                }
                            } else {
                                // CR is the last available byte: back up so the CR is
                                // re-read together with the next chunk
                                if (sao.limit > 0) {
                                    currentpos--;
                                }
                            }
                        } else if (read == HttpConstants.LF) {
                            // bare LF also terminates the body section
                            currentStatus = MultiPartStatus.PREEPILOGUE;
                            ampersandpos = currentpos - 1;
                            sao.setReadPosition(0);
                            setFinalBuffer(undecodedChunk.copy(firstpos, ampersandpos - firstpos));
                            firstpos = currentpos;
                            contRead = false;
                            break loop;
                        }
                        break;
                    default:
                        // any other state: just stop scanning
                        sao.setReadPosition(0);
                        contRead = false;
                        break loop;
                }
            }
            if (isLastChunk && currentAttribute != null) {
                // special case: final chunk with a pending attribute and no terminator
                ampersandpos = currentpos;
                if (ampersandpos > firstpos) {
                    setFinalBuffer(undecodedChunk.copy(firstpos, ampersandpos - firstpos));
                } else if (!currentAttribute.isCompleted()) {
                    setFinalBuffer(EMPTY_BUFFER);
                }
                firstpos = currentpos;
                currentStatus = MultiPartStatus.EPILOGUE;
            } else if (contRead && currentAttribute != null && currentStatus == MultiPartStatus.FIELD) {
                // mid-value at end of chunk: stash the partial content and wait for more data
                currentAttribute.addContent(undecodedChunk.copy(firstpos, currentpos - firstpos), false);
                firstpos = currentpos;
            }
            undecodedChunk.readerIndex(firstpos);
        } catch (ErrorDataDecoderException e) {
            // error while decoding: restore reader index before rethrowing
            undecodedChunk.readerIndex(firstpos);
            throw e;
        } catch (IOException e) {
            // error while decoding
            undecodedChunk.readerIndex(firstpos);
            throw new ErrorDataDecoderException(e);
        } catch (IllegalArgumentException e) {
            // error while decoding
            undecodedChunk.readerIndex(firstpos);
            throw new ErrorDataDecoderException(e);
        }
    }
}
public class Document {
    /**
     * Adds an existing revision copied from another database. Unlike a normal insertion, this does
     * not assign a new revision ID; instead the revision's ID must be given. The revision's history
     * (ancestry) must be given, which can put it anywhere in the revision tree. It's not an error if
     * the revision already exists locally; it will just be ignored.
     *
     * <p>This is not an operation that clients normally perform; it's used by the replicator.
     * You might want to use it if you're pre-loading a database with canned content, or if you're
     * implementing some new kind of replicator that transfers revisions from another database.
     *
     * @param properties the properties of the revision (_id and _rev will be ignored, but _deleted
     *        and _attachments are recognized)
     * @param attachments a dictionary providing attachment bodies, keyed by attachment name
     * @param revIDs the revision history as revision-ID strings in reverse chronological order;
     *        the first item must be the new revision's ID
     * @param sourceURL the URL of the database this revision came from, if any
     * @return true on success, false if attachment bodies could not be registered
     * @throws CouchbaseLiteException if the insertion fails
     */
    @InterfaceAudience.Public
    public boolean putExistingRevision(Map<String, Object> properties, Map<String, Object> attachments, List<String> revIDs, URL sourceURL) throws CouchbaseLiteException {
        // NOTE(review): precondition is enforced via `assert`, which is a no-op unless the JVM
        // runs with -ea — a null/empty revIDs list would then fail later with NPE/IOOBE; confirm
        // whether explicit argument validation is wanted here.
        assert (revIDs != null && revIDs.size() > 0);
        boolean deleted = false;
        if (properties != null)
            // "_deleted" may be absent; only an explicit Boolean true marks a tombstone
            deleted = properties.get("_deleted") != null && ((Boolean) properties.get("_deleted")).booleanValue();
        // first element of revIDs is the ID of the revision being inserted
        RevisionInternal rev = new RevisionInternal(documentId, revIDs.get(0), deleted);
        rev.setProperties(propertiesToInsert(properties));
        Status status = new Status();
        if (!database.registerAttachmentBodies(attachments, rev, status))
            return false;
        // forceInsert places the revision at the position described by its history
        database.forceInsert(rev, revIDs, sourceURL);
        return true;
    }
}
public class AbstractJaxRsResourceProvider {
    /**
     * Deletes a resource by id.
     *
     * @param id the id of the resource to delete
     * @return the response produced by executing the FHIR DELETE request
     * @throws IOException if request execution fails
     * @see <a href="https://www.hl7.org/fhir/http.html#delete">https://www.hl7.org/fhir/http.html#delete</a>
     */
    @DELETE
    @Path("/{id}")
    public Response delete(@PathParam("id") final String id) throws IOException {
        // build a DELETE request bound to the given id and hand it to the generic executor
        return execute(getResourceRequest(RequestTypeEnum.DELETE, RestOperationTypeEnum.DELETE).id(id));
    }
}
public class SRTServletResponse {
    /**
     * Alert message that the output stream has been written to.
     *
     * <p>Ensures the response is committed (headers flushed) before the first body
     * bytes go out; a no-op if the response is already committed.
     */
    public void alertFirstFlush() {
        if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE)) // 306998.15
            logger.logp(Level.FINE, CLASS_NAME, "alertFirstFlush", "entry");
        // commit exactly once: headers must be sent before any body data
        if (!isCommitted()) {
            commit();
        }
        if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE)) // 306998.15
            logger.logp(Level.FINE, CLASS_NAME, "alertFirstFlush", "exit");
    }
}
public class CmsObject {
    /**
     * Adds a user to a group.<p>
     *
     * @param username the name of the user that is to be added to the group
     * @param groupname the name of the group
     * @throws CmsException if something goes wrong
     */
    public void addUserToGroup(String username, String groupname) throws CmsException {
        // delegate to the security manager; the trailing 'false' is the readRoles flag
        // (this call operates on regular groups, not role groups)
        m_securityManager.addUserToGroup(m_context, username, groupname, false);
    }
}
public class StatefulBeanReaper {
    /**
     * Dumps the internal state of the cache to the trace service.
     *
     * <p>One-shot: subsequent calls are no-ops once {@code dumped} has been set. The flag is
     * set in {@code finally} so even a failed dump is not retried. Counter reads are taken
     * under the instance lock for a consistent snapshot.
     */
    public void dump() {
        if (dumped) {
            return;
        }
        try {
            Tr.dump(tc, "-- StatefulBeanReaper Dump -- ", this);
            synchronized (this) {
                Tr.dump(tc, "Number of objects: " + this.numObjects);
                Tr.dump(tc, "Number of adds: " + this.numAdds);
                Tr.dump(tc, "Number of removes: " + this.numRemoves);
                Tr.dump(tc, "Number of null removes: " + this.numNullRemoves);
                Tr.dump(tc, "Number of deletes: " + this.numDeletes);
            }
        } finally {
            // mark as dumped regardless of outcome so the dump only ever runs once
            dumped = true;
        }
    }
}
public class Property { /** * Retrieves the hub property if it is set . This could be to sauce , grid , or any other cloud tool . * This should be provided with the protocol and address , but leaving out the / wd / hub * @ return String : the set hub address , null if none are set */ public static String getHub ( ) throws InvalidHubException { } }
String hub = getProgramProperty ( HUB ) ; if ( hub == null || "" . equals ( hub ) ) { throw new InvalidHubException ( "Hub isn't set" ) ; } return hub ;
public class ExpressionUtils {
    /**
     * Encodes text for inclusion in a URL query string. Should be equivalent to Django's
     * urlquote function.
     *
     * @param text the text to encode
     * @return the percent-encoded text (spaces become {@code %20}, not {@code +})
     */
    public static String urlquote(String text) {
        final String encoded;
        try {
            encoded = URLEncoder.encode(text, "UTF-8");
        } catch (UnsupportedEncodingException ex) {
            // UTF-8 support is mandated by the Java platform, so this cannot happen
            throw new RuntimeException(ex);
        }
        // URLEncoder uses '+' for spaces (form encoding); Django-style quoting wants %20
        return encoded.replace("+", "%20");
    }
}
public class CustomField {
    /**
     * Gets the visibility value for this CustomField.
     *
     * @return visibility — how visible/accessible this field is in the UI
     */
    public com.google.api.ads.admanager.axis.v201805.CustomFieldVisibility getVisibility() {
        return visibility;
    }
}
public class Validate { /** * Validates the given CPE string value to ensure it is either a valid CPE * URI or Formatted String . * @ param value the CPE to validate * @ return the validation status given value ; * @ see us . springett . parsers . cpe . util . Status # isValid ( ) */ public static Status cpe ( String value ) { } }
if ( "cpe:2.3:" . regionMatches ( 0 , value , 0 , 8 ) ) { return formattedString ( value ) ; } return cpeUri ( value ) ;
public class ProcessExecutorImpl {
    /**
     * Handles a process-finish event.
     *
     * <p>Two paths, selected by the event's secondary owner type:
     * <ul>
     * <li>No activity-instance secondary owner: a top-level (non-remote) process or an
     *     embedded process finished. If it is an embedded ABORT-type process, the parent
     *     process instance tree is cancelled and a finish message is propagated.</li>
     * <li>Activity-instance secondary owner with a process/main-process/error owner: a local
     *     subprocess (or error/correction/delay handler) finished, so the invoking activity
     *     is resumed.</li>
     * </ul>
     *
     * @param event the internal process-finish event
     * @throws ProcessException wrapping any failure during handling
     */
    void handleProcessFinish(InternalEvent event) throws ProcessException {
        try {
            String ownerType = event.getOwnerType();
            String secondaryOwnerType = event.getSecondaryOwnerType();
            if (!OwnerType.ACTIVITY_INSTANCE.equals(secondaryOwnerType)) {
                // top level processes (non-remote) or ABORT embedded processes
                ProcessInstance pi = edao.getProcessInstance(event.getWorkInstanceId());
                Process subProcVO = getProcessDefinition(pi);
                if (pi.isEmbedded()) {
                    subProcVO.getSubProcessVO(event.getWorkId());
                    String embeddedProcType = subProcVO.getAttribute(WorkAttributeConstant.EMBEDDED_PROCESS_TYPE);
                    if (ProcessVisibilityConstant.EMBEDDED_ABORT_PROCESS.equals(embeddedProcType)) {
                        // an embedded ABORT process finished: cancel the whole parent tree
                        Long parentProcInstId = event.getOwnerId();
                        pi = edao.getProcessInstance(parentProcInstId);
                        this.cancelProcessInstanceTree(pi);
                        if (logger.isInfoEnabled()) {
                            logger.info(logtag(pi.getProcessId(), pi.getId(), pi.getMasterRequestId()), "Process cancelled");
                        }
                        // propagate a finish message for the cancelled parent, preserving its
                        // secondary owner so the caller can be resumed in turn
                        InternalEvent procFinishMsg = InternalEvent.createProcessFinishMessage(pi);
                        if (OwnerType.ACTIVITY_INSTANCE.equals(pi.getSecondaryOwner())) {
                            procFinishMsg.setSecondaryOwnerType(pi.getSecondaryOwner());
                            procFinishMsg.setSecondaryOwnerId(pi.getSecondaryOwnerId());
                        }
                        this.sendInternalEvent(procFinishMsg);
                    }
                }
            } else if (ownerType.equals(OwnerType.PROCESS_INSTANCE)
                || ownerType.equals(OwnerType.MAIN_PROCESS_INSTANCE)
                || ownerType.equals(OwnerType.ERROR)) {
                // local process call or call to error/correction/delay handler:
                // resume the activity instance that invoked the finished process
                Long activityInstId = event.getSecondaryOwnerId();
                ActivityInstance actInst = edao.getActivityInstance(activityInstId);
                ProcessInstance procInst = edao.getProcessInstance(actInst.getProcessInstanceId());
                BaseActivity cntrActivity = prepareActivityForResume(event, procInst, actInst);
                if (cntrActivity != null) {
                    resumeProcessInstanceForSecondaryOwner(event, cntrActivity);
                }
                // else the process is completed/cancelled
            }
        } catch (Exception e) {
            throw new ProcessException(-1, e.getMessage(), e);
        }
    }
}
public class NoteEditor { /** * GEN - LAST : event _ buttonImportActionPerformed */ private void buttonExportActionPerformed ( java . awt . event . ActionEvent evt ) { } }
// GEN - FIRST : event _ buttonExportActionPerformed final File toSave = DialogProviderManager . getInstance ( ) . getDialogProvider ( ) . msgSaveFileDialog ( null , "note-editor" , UiUtils . BUNDLE . getString ( "PlainTextEditor.buttonSaveActionPerformed.saveTitle" ) , null , true , TEXT_FILE_FILTER , "Save" ) ; // NOI18N if ( toSave != null ) { try { final String text = getText ( ) ; FileUtils . writeStringToFile ( toSave , text , "UTF-8" ) ; // NOI18N } catch ( Exception ex ) { LOGGER . error ( "Error during text file saving" , ex ) ; // NOI18N DialogProviderManager . getInstance ( ) . getDialogProvider ( ) . msgError ( Main . getApplicationFrame ( ) , UiUtils . BUNDLE . getString ( "PlainTextEditor.buttonSaveActionPerformed.msgError" ) ) ; } }
public class JsonArray { /** * Convenience method to prevent casting JsonElement to JsonArray when iterating in the common case that you have * an array of JsonArrays . * @ return iterable that iterates over JsonArrays instead of JsonElements . */ public @ Nonnull Iterable < JsonArray > arrays ( ) { } }
final JsonArray parent = this ; return ( ) -> { final Iterator < JsonElement > iterator = parent . iterator ( ) ; return new Iterator < JsonArray > ( ) { @ Override public boolean hasNext ( ) { return iterator . hasNext ( ) ; } @ Override public JsonArray next ( ) { return iterator . next ( ) . asArray ( ) ; } @ Override public void remove ( ) { iterator . remove ( ) ; } } ; } ;
public class WsByteBufferUtils { /** * Convert a buffer into a byte array using the input starting position and * ending limit . If the buffer is null or empty then a null byte [ ] is * returned . * @ param buff * @ param position * @ param limit * @ return byte [ ] */ public static final byte [ ] asByteArray ( WsByteBuffer buff , int position , int limit ) { } }
if ( null == buff ) return null ; int size = limit - position ; if ( 0 == size ) return null ; byte [ ] byteBuff = new byte [ size ] ; int currentPosition = buff . position ( ) ; buff . position ( position ) ; buff . get ( byteBuff , 0 , size ) ; buff . position ( currentPosition ) ; return byteBuff ;
public class TensorSufficientStatistics {
    /**
     * Increments the element that corresponds to the statistic/parameter
     * {@code featureAssignment}.
     *
     * @param featureAssignment the assignment identifying the statistic to increment
     * @param amount the amount to add to that statistic
     */
    public void incrementFeature(Assignment featureAssignment, double amount) {
        if (isDense) {
            // dense storage supports in-place entry updates
            statistics.incrementEntry(amount, statisticNames.assignmentToIntArray(featureAssignment));
        } else {
            // sparse storage is immutable here: build a one-element tensor and add it
            Tensor increment = SparseTensor.singleElement(getTensorDimensions(), getTensorSizes(), statisticNames.assignmentToIntArray(featureAssignment), amount);
            statisticsTensor = statisticsTensor.elementwiseAddition(increment);
        }
    }
}
public class ReflectiveRandomIndexing { /** * { @ inheritDoc } */ public IntegerVector getVector ( String word ) { } }
IntegerVector v = termToReflectiveSemantics . get ( word ) ; if ( v == null ) { return null ; } return Vectors . immutable ( v ) ;
public class LargestOfMaximum { /** * Computes the largest value of the maximum membership function of a fuzzy * set . The largest value is computed by integrating over the fuzzy set . The * integration algorithm is the midpoint rectangle method * ( https : / / en . wikipedia . org / wiki / Rectangle _ method ) . * @ param term is the fuzzy set * @ param minimum is the minimum value of the fuzzy set * @ param maximum is the maximum value of the fuzzy set * @ return the largest ` x ` - coordinate of the maximum membership function * value in the fuzzy set */ @ Override public double defuzzify ( Term term , double minimum , double maximum ) { } }
if ( ! Op . isFinite ( minimum + maximum ) ) { return Double . NaN ; } final int resolution = getResolution ( ) ; final double dx = ( maximum - minimum ) / resolution ; double x , y ; double ymax = - 1.0 , xlargest = maximum ; for ( int i = 0 ; i < resolution ; ++ i ) { x = minimum + ( i + 0.5 ) * dx ; y = term . membership ( x ) ; if ( Op . isGE ( y , ymax ) ) { ymax = y ; xlargest = x ; } } return xlargest ;
public class TreeMap { /** * Balancing operations . * Implementations of rebalancings during insertion and deletion are * slightly different than the CLR version . Rather than using dummy * nilnodes , we use a set of accessors that deal properly with null . They * are used to avoid messiness surrounding nullness checks in the main * algorithms . */ private static < K , V > boolean colorOf ( TreeMapEntry < K , V > p ) { } }
return ( p == null ? BLACK : p . color ) ;
public class FileSharedServerLeaseLog {
    /**
     * Deletes the lease file (the file-based equivalent of a DB table record) for the given
     * server identity.
     *
     * <p>The delete runs under an exclusive {@link FileLock} on the shared control file so
     * that only one server manipulates lease files at a time. For a peer server's lease
     * (not the local one), the file is only deleted if its lease has already expired.
     * Lock/unlock and the deletion itself run in privileged actions for Java 2 security.
     *
     * <p>NOTE(review): if the control file cannot be opened, the privileged action returns a
     * null channel and the subsequent {@code channel.lock()} / {@code channel.close()} calls
     * would throw NullPointerException — confirm whether that path is reachable in practice.
     *
     * @param recoveryIdentity identity of the server whose lease should be deleted
     * @throws Exception on lock-release or channel-close failure
     * @see com.ibm.ws.recoverylog.spi.SharedServerLeaseLog#deleteServerLease(java.lang.String)
     */
    @Override
    public void deleteServerLease(final String recoveryIdentity) throws Exception {
        if (tc.isEntryEnabled())
            Tr.entry(tc, "deleteServerLease", new Object[] { recoveryIdentity, this });

        // Is a lease file (equivalent to a record in the DB table) available for deletion
        final File leaseFile = new File(_serverInstallLeaseLogDir + String.valueOf(File.separatorChar) + recoveryIdentity);

        // At this point we are ready to acquire a lock on the control file prior to
        // attempting to delete the server's file.
        FileLock lock = null;
        FileChannel channel = AccessController.doPrivileged(new PrivilegedAction<FileChannel>() {
            @Override
            public FileChannel run() {
                FileChannel theChannel = null;
                try {
                    theChannel = new RandomAccessFile(_controlFile, "rw").getChannel();
                } catch (FileNotFoundException e) {
                    if (tc.isDebugEnabled())
                        Tr.debug(tc, "Caught FileNotFound exception when trying to lock control file");
                    theChannel = null;
                }
                return theChannel;
            }
        });

        try {
            // Block until we can acquire the lock on the control file.
            if (tc.isDebugEnabled())
                Tr.debug(tc, "Block until we acquire the lock on the control file");
            lock = channel.lock();
            if (lock != null) {
                // Delete the leaseFile
                AccessController.doPrivileged(new PrivilegedAction<Void>() {
                    @Override
                    public Void run() {
                        boolean success = false;
                        try {
                            // If we are about to delete a peer lease file, then do a check to be
                            // sure that a new instance of the peer has not "recently" started.
                            boolean attemptDelete = true;
                            if (!recoveryIdentity.equals(_localRecoveryIdentity)) {
                                final long leaseTime = leaseFile.lastModified();
                                if (tc.isDebugEnabled()) {
                                    Tr.debug(tc, "recoveryId: " + recoveryIdentity + ", leaseTime: " + leaseTime);
                                }
                                PeerLeaseData pld = new PeerLeaseData(recoveryIdentity, leaseTime, _leaseTimeout);
                                if (!pld.isExpired()) {
                                    if (tc.isDebugEnabled())
                                        Tr.debug(tc, "The lease file has not expired, do not attempt deletion");
                                    attemptDelete = false;
                                }
                            }
                            // Attempt to delete the lease file
                            if (attemptDelete) {
                                if (tc.isDebugEnabled())
                                    Tr.debug(tc, "Attempt to delete file " + leaseFile.getName() + ", in dir " + _serverInstallLeaseLogDir);
                                success = leaseFile.delete();
                                if (success) {
                                    if (tc.isDebugEnabled())
                                        Tr.debug(tc, "Successfully deleted lease file");
                                } else {
                                    if (tc.isDebugEnabled())
                                        Tr.debug(tc, "Failed to delete lease file");
                                }
                            }
                        } catch (SecurityException se) {
                            if (tc.isDebugEnabled())
                                Tr.debug(tc, "Caught SecurityException " + se);
                        }
                        return null;
                    }
                });
            }
        } catch (OverlappingFileLockException e) {
            // File is already locked in this thread or virtual machine. We're not
            // expecting this to happen. Log the event.
            if (tc.isDebugEnabled())
                Tr.debug(tc, "The control file aleady appears to be locked in another thread");
        } catch (IOException e) {
            // We're not expecting this to happen. Log the event.
            if (tc.isDebugEnabled())
                Tr.debug(tc, "Caught an IOException");
        } finally {
            // Release the lock - if it is not null!
            if (lock != null) {
                lock.release();
            }
            // Close the channel
            channel.close();
        }

        if (tc.isEntryEnabled())
            Tr.exit(tc, "deleteServerLease", this);
    }
}
public class DescribeAccountAttributesResult { /** * Account quota information . * @ param accountQuotas * Account quota information . */ public void setAccountQuotas ( java . util . Collection < AccountQuota > accountQuotas ) { } }
if ( accountQuotas == null ) { this . accountQuotas = null ; return ; } this . accountQuotas = new java . util . ArrayList < AccountQuota > ( accountQuotas ) ;
public class StyleHelper { /** * Removes enum value style name from UIObject unless style is { @ code null } . * @ param uiObject Object to remove style from * @ param style Style name */ public static < E extends Style . HasCssName > void removeEnumStyleName ( final UIObject uiObject , final E style ) { } }
if ( style != null && style . getCssName ( ) != null && ! style . getCssName ( ) . isEmpty ( ) ) { uiObject . removeStyleName ( style . getCssName ( ) ) ; }
public class SortedIntList {
    /**
     * Binary search over {@code a[start..end)} for {@code key}.
     *
     * <p>Switch to {@code java.util.Arrays.binarySearch} when we depend on Java 6.
     *
     * @param a sorted backing array
     * @param start first index of the search range (inclusive)
     * @param end last index of the search range (exclusive)
     * @param key value to locate
     * @return the index of {@code key} if found; otherwise {@code -(insertionPoint + 1)},
     *         matching the {@code Arrays.binarySearch} contract
     */
    private static int binarySearch(int[] a, int start, int end, int key) {
        int lo = start;
        int hi = end - 1; // search range is [lo, hi]
        // invariant: lo <= hi while the range is non-empty
        while (lo <= hi) {
            // Unsigned shift avoids the classic int overflow of (lo + hi) / 2
            // when lo + hi exceeds Integer.MAX_VALUE for very large arrays.
            int pivot = (lo + hi) >>> 1;
            int v = a[pivot];
            if (v < key) {
                // needs to search upper half
                lo = pivot + 1;
            } else if (v > key) {
                // needs to search lower half
                hi = pivot - 1;
            } else {
                // eureka!
                return pivot;
            }
        }
        return -(lo + 1); // not found: encoded insertion point
    }
}
public class JPAPuId { /** * Persistence unit name setter . */ public void setPuName ( String puName ) { } }
// re - initialize puName only if it has not been set to avoid // overriding valid relative puName defined in annotation / dd . if ( ivPuName == null || ivPuName . length ( ) == 0 ) // d442457 { ivPuName = puName ; reComputeHashCode ( ) ; // d416151.3.9 }
public class PoiWriter { /** * Convert string representation back to tags map . */ Map < String , String > stringToTags ( String tagsmapstring ) { } }
String [ ] sb = tagsmapstring . split ( "\\r" ) ; Map < String , String > map = new HashMap < > ( ) ; for ( String key : sb ) { if ( key . contains ( "=" ) ) { String [ ] set = key . split ( "=" ) ; if ( set . length == 2 ) map . put ( set [ 0 ] , set [ 1 ] ) ; } } return map ;
public class ActionScriptUtils { /** * Converts java types to their actionscript equivalents for ooo - style streaming . */ public static void convertBaseClasses ( ImportSet imports ) { } }
// replace primitive types with OOO types ( required for unboxing ) imports . replace ( "byte" , "com.threerings.util.Byte" ) ; imports . replace ( "boolean" , "com.threerings.util.langBoolean" ) ; imports . replace ( "[B" , "flash.utils.ByteArray" ) ; imports . replace ( "float" , "com.threerings.util.Float" ) ; imports . replace ( "long" , "com.threerings.util.Long" ) ; if ( imports . removeAll ( "[*" ) > 0 ) { imports . add ( "com.threerings.io.TypedArray" ) ; } // convert java primitive boxes to their ooo counterparts imports . replace ( Integer . class , "com.threerings.util.Integer" ) ; // convert some java . util types to their ooo counterparts imports . replace ( Map . class , "com.threerings.util.Map" ) ; // get rid of java . lang stuff and any remaining primitives imports . removeGlobals ( ) ; // get rid of remaining arrays imports . removeArrays ( ) ;
public class TSAGeoMag { /** * Returns the horizontal magnetic field intensity from the * Department of Defense geomagnetic model and data * in nano Tesla . * @ paramdlat Latitude in decimal degrees . * @ param dlongLongitude in decimal degrees . * @ paramyearDate of the calculation in decimal years . * @ paramaltitudeAltitude of the calculation in kilometers . * @ return The horizontal magnetic field strength in nano Tesla . */ public double getHorizontalIntensity ( double dlat , double dlong , double year , double altitude ) { } }
// Run the full geomagnetic model computation for this location, date and
// altitude; calcGeoMag stores its results in instance fields.
calcGeoMag(dlat, dlong, year, altitude);
// bh holds the horizontal field intensity (nano Tesla) computed above —
// presumably set by calcGeoMag; field not visible here, TODO confirm.
return bh;
public class SDValidation { /** * Validate that the operation is being applied on an floating point type SDVariable * @ param opName Operation name to print in the exception * @ param v Variable to validate datatype for ( input to operation ) */ protected static void validateFloatingPoint ( String opName , SDVariable v ) { } }
if ( v == null ) return ; if ( ! v . dataType ( ) . isFPType ( ) ) throw new IllegalStateException ( "Cannot apply operation \"" + opName + "\" to variable \"" + v . getVarName ( ) + "\" with non-floating point data type " + v . dataType ( ) ) ;
public class RateLimitedLog { /** * We ' ve run out of capacity in our cache of RateLimitedLogWithPattern objects . This probably * means that the caller is accidentally calling us with an already - interpolated string , instead * of using the pattern as the key and letting us do the interpolation . Don ' t lose data ; * instead , fall back to flushing the entire cache but carrying on . The worst - case scenario * here is that we flush the logs far more frequently than their requested durations , potentially * allowing the logging to impact throughput , but we don ' t lose any log data . */ private void outOfCacheCapacity ( ) { } }
synchronized ( knownPatterns ) { if ( knownPatterns . size ( ) > MAX_PATTERNS_PER_LOG ) { logger . warn ( "out of capacity in RateLimitedLog registry; accidentally " + "using interpolated strings as patterns?" ) ; registry . flush ( ) ; knownPatterns . clear ( ) ; } }
public class XML { /** * Convert a well - formed ( but not necessarily valid ) XML into a * JSONObject . Some information may be lost in this transformation because * JSON is a data format and XML is a document format . XML uses elements , * attributes , and content text , while JSON uses unordered collections of * name / value pairs and arrays of values . JSON does not does not like to * distinguish between elements and attributes . Sequences of similar * elements are represented as JSONArrays . Content text may be placed in a * " content " member . Comments , prologs , DTDs , and < code > & lt ; [ [ ] ] > < / code > * are ignored . * All values are converted as strings , for 1 , 01 , 29.0 will not be coerced to * numbers but will instead be the exact value as seen in the XML document . * @ param reader The XML source reader . * @ param keepStrings If true , then values will not be coerced into boolean * or numeric values and will instead be left as strings * @ return A JSONObject containing the structured data from the XML string . * @ throws JSONException Thrown if there is an errors while parsing the string */ public static JSONObject toJSONObject ( Reader reader , boolean keepStrings ) throws JSONException { } }
JSONObject jo = new JSONObject ( ) ; XMLTokener x = new XMLTokener ( reader ) ; while ( x . more ( ) ) { x . skipPast ( "<" ) ; if ( x . more ( ) ) { parse ( x , jo , null , keepStrings ) ; } } return jo ;
public class OauthHelper { /** * renew Client Credential token synchronously . * When success will renew the Jwt jwt passed in . * When fail will return Status code so that can be handled by caller . * @ param jwt the jwt you want to renew * @ return Jwt when success , it will be the same object as the jwt you passed in ; return Status when fail ; */ private static Result < Jwt > renewCCTokenSync ( final Jwt jwt ) { } }
// Already expired , try to renew getCCTokenSynchronously but let requests use the old token . logger . trace ( "In renew window and token is already expired." ) ; // the token can be renew when it ' s not on renewing or current time is lager than retrying interval if ( ! jwt . isRenewing ( ) || System . currentTimeMillis ( ) > jwt . getExpiredRetryTimeout ( ) ) { jwt . setRenewing ( true ) ; jwt . setEarlyRetryTimeout ( System . currentTimeMillis ( ) + Jwt . getExpiredRefreshRetryDelay ( ) ) ; Result < Jwt > result = getCCTokenRemotely ( jwt ) ; // set renewing flag to false no mater fail or success jwt . setRenewing ( false ) ; return result ; } else { if ( logger . isTraceEnabled ( ) ) logger . trace ( "Circuit breaker is tripped and not timeout yet!" ) ; // token is renewing return Failure . of ( new Status ( STATUS_CLIENT_CREDENTIALS_TOKEN_NOT_AVAILABLE ) ) ; }
public class TextsearchQueryNode { /** * Sets the relative path to the item where the textsearch is performed . If * < code > relPath < / code > is < code > null < / code > the textsearch is performed on * the context node . * @ param relPath the relative path to an item . * @ throws IllegalArgumentException if < code > relPath < / code > is absolute . */ public void setRelativePath ( QPath relPath ) { } }
if ( relPath != null && relPath . isAbsolute ( ) ) { throw new IllegalArgumentException ( "relPath must be relative" ) ; } this . relPath = relPath ; if ( relPath == null ) { // context node is never a property propertyRef = false ; }
public class NewChunk { /** * ( does not live on inside the K / V store ) . */ public Chunk new_close ( ) { } }
// Compress this NewChunk into its final Chunk representation.
Chunk chk = compress();
// When backed by an AppendableVec, register the finished chunk with it
// (the result does not live on inside the K/V store — see method doc).
if (_vec instanceof AppendableVec)
    ((AppendableVec) _vec).closeChunk(this);
return chk;
public class XMLConfigurationProvider { /** * Load the XML configuration on memory as a DOM structure with SAX . * Additional information about elements location is added . Non valid DTDs * or XML structures are detected . * @ param file * XML configuration * @ return XML tree */ private Document loadDocument ( String file ) { } }
Document doc = null ; URL url = null ; File f = new File ( file ) ; if ( f . exists ( ) ) { try { url = f . toURI ( ) . toURL ( ) ; } catch ( MalformedURLException e ) { throw new ConfigurationException ( "Unable to load " + file , e ) ; } } if ( url == null ) { url = ClassLoader . getSystemResource ( file ) ; } InputStream is = null ; if ( url == null ) { if ( errorIfMissing ) { throw new ConfigurationException ( "Could not open files of the name " + file ) ; } else { LOG . info ( "Unable to locate configuration files of the name " + file + ", skipping" ) ; return doc ; } } try { is = url . openStream ( ) ; InputSource in = new InputSource ( is ) ; in . setSystemId ( url . toString ( ) ) ; doc = DomHelper . parse ( in , dtdMappings ) ; } catch ( Exception e ) { throw new ConfigurationException ( "Unable to load " + file , e ) ; } finally { try { is . close ( ) ; } catch ( IOException e ) { LOG . error ( "Unable to close input stream" , e ) ; } } if ( doc != null ) { LOG . debug ( "Wallmod configuration parsed" ) ; } return doc ;
public class BuilderFactory { /** * Return the builder for the annotation type . * @ param annotationType the annotation type being documented . * @ param prevType the previous type that was documented . * @ param nextType the next type being documented . * @ return the writer for the annotation type . Return null if this * writer is not supported by the doclet . */ public AbstractBuilder getAnnotationTypeBuilder ( TypeElement annotationType , TypeMirror prevType , TypeMirror nextType ) { } }
// Delegate to the AnnotationTypeBuilder factory, wiring in a writer obtained
// from the writer factory; per the contract above, the result may be null
// when the doclet does not support this writer.
return AnnotationTypeBuilder.getInstance(context, annotationType,
        writerFactory.getAnnotationTypeWriter(annotationType, prevType, nextType));
public class CreateBrokerRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( CreateBrokerRequest createBrokerRequest , ProtocolMarshaller protocolMarshaller ) { } }
// Fail fast on a null request rather than emitting a partial marshalling.
if (createBrokerRequest == null) {
    throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
    // Marshall each request field with its corresponding protocol binding.
    protocolMarshaller.marshall(createBrokerRequest.getAutoMinorVersionUpgrade(), AUTOMINORVERSIONUPGRADE_BINDING);
    protocolMarshaller.marshall(createBrokerRequest.getBrokerName(), BROKERNAME_BINDING);
    protocolMarshaller.marshall(createBrokerRequest.getConfiguration(), CONFIGURATION_BINDING);
    protocolMarshaller.marshall(createBrokerRequest.getCreatorRequestId(), CREATORREQUESTID_BINDING);
    protocolMarshaller.marshall(createBrokerRequest.getDeploymentMode(), DEPLOYMENTMODE_BINDING);
    protocolMarshaller.marshall(createBrokerRequest.getEngineType(), ENGINETYPE_BINDING);
    protocolMarshaller.marshall(createBrokerRequest.getEngineVersion(), ENGINEVERSION_BINDING);
    protocolMarshaller.marshall(createBrokerRequest.getHostInstanceType(), HOSTINSTANCETYPE_BINDING);
    protocolMarshaller.marshall(createBrokerRequest.getLogs(), LOGS_BINDING);
    protocolMarshaller.marshall(createBrokerRequest.getMaintenanceWindowStartTime(), MAINTENANCEWINDOWSTARTTIME_BINDING);
    protocolMarshaller.marshall(createBrokerRequest.getPubliclyAccessible(), PUBLICLYACCESSIBLE_BINDING);
    protocolMarshaller.marshall(createBrokerRequest.getSecurityGroups(), SECURITYGROUPS_BINDING);
    protocolMarshaller.marshall(createBrokerRequest.getSubnetIds(), SUBNETIDS_BINDING);
    protocolMarshaller.marshall(createBrokerRequest.getTags(), TAGS_BINDING);
    protocolMarshaller.marshall(createBrokerRequest.getUsers(), USERS_BINDING);
} catch (Exception e) {
    // Wrap any marshalling failure, preserving the original cause.
    throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
public class ObjectOutputStream { /** * Writes out the mapping for a class . */ protected void writeClassMapping ( int code , Class < ? > sclass ) throws IOException { } }
writeShort ( code ) ; String cname = sclass . getName ( ) ; if ( _translations != null ) { String tname = _translations . get ( cname ) ; if ( tname != null ) { cname = tname ; } } writeUTF ( cname ) ;
public class Icon { /** * Creates an { @ link Icon Icon } with the specified { @ link java . io . File File } . * < br > We here read the specified File and forward the retrieved byte data to { @ link # from ( byte [ ] ) } . * @ param file * An existing , not - null file . * @ throws IllegalArgumentException * if the provided file is either null or does not exist * @ throws IOException * if there is a problem while reading the file . * @ return An Icon instance representing the specified File * @ see net . dv8tion . jda . core . utils . IOUtil # readFully ( File ) */ public static Icon from ( File file ) throws IOException { } }
// Validate eagerly: a null or missing file is a caller error.
Checks.notNull(file, "Provided File");
Checks.check(file.exists(), "Provided file does not exist!");
// Read the whole file into memory and defer to the byte[] factory.
return from(IOUtil.readFully(file));
public class MarketplaceAgreementsInner { /** * Save marketplace terms . * @ param publisherId Publisher identifier string of image being deployed . * @ param offerId Offer identifier string of image being deployed . * @ param planId Plan identifier string of image being deployed . * @ param parameters Parameters supplied to the Create Marketplace Terms operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws ErrorResponseException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the AgreementTermsInner object if successful . */ public AgreementTermsInner create ( String publisherId , String offerId , String planId , AgreementTermsInner parameters ) { } }
// Block on the async implementation and unwrap the response body.
return createWithServiceResponseAsync(publisherId, offerId, planId, parameters).toBlocking().single().body();
public class AbstractScatterplotVisualization { /** * Utility function to setup a canvas element for the visualization . * @ param svgp Plot element * @ param proj Projection to use * @ param margin Margin to use * @ param width Width * @ param height Height * @ return wrapper element with appropriate view box . */ public static Element setupCanvas ( SVGPlot svgp , Projection2D proj , double margin , double width , double height ) { } }
final CanvasSize canvas = proj . estimateViewport ( ) ; final double sizex = canvas . getDiffX ( ) ; final double sizey = canvas . getDiffY ( ) ; String transform = SVGUtil . makeMarginTransform ( width , height , sizex , sizey , margin ) + " translate(" + SVGUtil . fmt ( sizex * .5 ) + " " + SVGUtil . fmt ( sizey * .5 ) + ")" ; final Element layer = SVGUtil . svgElement ( svgp . getDocument ( ) , SVGConstants . SVG_G_TAG ) ; SVGUtil . setAtt ( layer , SVGConstants . SVG_TRANSFORM_ATTRIBUTE , transform ) ; return layer ;
public class MathBindings { /** * Binding for { @ link java . lang . Math # scalb ( double , int ) } * @ param d number to be scaled by a power of two . * @ param scaleFactor power of 2 used to scale { @ code d } * @ return { @ code d } & times ; 2 < sup > { @ code scaleFactor } < / sup > */ public static DoubleBinding scalb ( final ObservableDoubleValue d , final ObservableIntegerValue scaleFactor ) { } }
return createDoubleBinding ( ( ) -> Math . scalb ( d . get ( ) , scaleFactor . get ( ) ) , d , scaleFactor ) ;
public class ArrayTypeUtils { /** * There are two possible cases : * < ul > * < li > Type is pure class - then arrays is simple class ( e . g . { @ code int [ ] . class } or { @ code List [ ] . class } ) < / li > * < li > Type is generified - then { @ link GenericArrayType } must be used ( e . g . for parameterized type * { @ code List < String > } ) < / li > * < / ul > * @ param type type to get array of * @ return array type * @ see # toArrayClass ( Class ) for pure class case */ public static Type toArrayType ( final Type type ) { } }
return type instanceof Class ? toArrayClass ( ( Class < ? > ) type ) : new GenericArrayTypeImpl ( type ) ;
public class PipelineManager { /** * Queries the Slot with the given Key from the data store and if the Slot is * not found then throws an { @ link AbandonTaskException } . * @ param key The Key of the slot to fetch . * @ param inflate If this is { @ code true } then the Barriers that are waiting * on the Slot and the other Slots that those Barriers are waiting on * will also be fetched from the data store and used to partially * populate the graph of objects attached to the returned Slot . In * particular : { @ link Slot # getWaitingOnMeInflated ( ) } will not return * { @ code null } and also that for each of the { @ link Barrier Barriers } * returned from that method { @ link Barrier # getWaitingOnInflated ( ) } * will not return { @ code null } . * @ return A { @ code Slot } , possibly with a partially - inflated associated graph * of objects . * @ throws AbandonTaskException If either the Slot or the associated Barriers * and slots are not found in the data store . */ private static Slot querySlotOrAbandonTask ( Key key , boolean inflate ) { } }
try {
    return backEnd.querySlot(key, inflate);
} catch (NoSuchObjectException e) {
    // The slot (or part of its associated graph) is missing from the data
    // store: log and abandon the task rather than retrying indefinitely.
    logger.log(Level.WARNING, "Cannot find the slot: " + key + ". Ignoring the task.", e);
    throw new AbandonTaskException();
}
public class SessionApi { /** * Get list of objects describes by type and names * Get list of objects describes by type and names * @ param type Type of object ( required ) * @ param names list of object names seperate by coma . ( required ) * @ return ApiSuccessResponse * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiSuccessResponse getObjects ( String type , String names ) throws ApiException { } }
ApiResponse < ApiSuccessResponse > resp = getObjectsWithHttpInfo ( type , names ) ; return resp . getData ( ) ;
public class ExternalContextUtils { /** * Returns the contextPath of the ServletRequest or < code > null < / code > for portlet requests * @ param ec the current external context * @ return a String containing the request context path * @ see ExternalContext # getRequestContextPath ( ) * @ deprecated use ExternalContext . getRequestContextPath ( ) as of JSF 1.2 . This method * does not appropriately handle portlet environments , but the functionality * is maintained to prevent needing to change the contract . */ @ Deprecated public static String getRequestContextPath ( ExternalContext ec ) { } }
if ( ! isPortlet ( ec ) ) { return ec . getRequestContextPath ( ) ; } else { return null ; }
public class MapWithProtoValuesSubject { private final MapSubject delegate ( ) { } }
MapSubject delegate = check ( ) . that ( actual ( ) ) ; if ( internalCustomName ( ) != null ) { delegate = delegate . named ( internalCustomName ( ) ) ; } return delegate ;
public class AOStream { /** * Must be called from within synchronized ( this ) */ private final void closeInternal ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "closeInternal" ) ; stop ( ) ; // stop the stream dem . close ( ) ; closed = true ; // close all the JSRemoteConsumerPoints . Enumeration vEnum = consumerKeyTable . elements ( ) ; while ( vEnum . hasMoreElements ( ) ) { JSRemoteConsumerPoint aock = ( JSRemoteConsumerPoint ) vEnum . nextElement ( ) ; aock . close ( ) ; } consumerKeyTable . clear ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "closeInternal" ) ;