signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class AbstractGrailsClass { /** * < p > Looks for a property of the reference instance with a given name and type . < / p >
* < p > If found its value is returned . We follow the Java bean conventions with augmentation for groovy support
* and static fields / properties . We will therefore match , in this order :
* < ol >
* < li > Public static field
* < li > Public static property with getter method
* < li > Standard public bean property ( with getter or just public field , using normal introspection )
* < / ol >
* @ return property value or null if no property or static field was found */
protected < T > T getPropertyOrStaticPropertyOrFieldValue ( String name , Class < T > type ) { } } | return ClassPropertyFetcher . getStaticPropertyValue ( getClazz ( ) , name , type ) ; |
public class ZookeeperRegistry { /** * 订阅
* @ param config */
protected void subscribeConsumerUrls ( ConsumerConfig config ) { } } | // 注册Consumer节点
String url = null ; if ( config . isRegister ( ) ) { try { String consumerPath = buildConsumerPath ( rootPath , config ) ; if ( consumerUrls . containsKey ( config ) ) { url = consumerUrls . get ( config ) ; } else { url = ZookeeperRegistryHelper . convertConsumerToUrl ( config ) ; consumerUrls . put ( config , url ) ; } String encodeUrl = URLEncoder . encode ( url , "UTF-8" ) ; getAndCheckZkClient ( ) . create ( ) . creatingParentContainersIfNeeded ( ) . withMode ( CreateMode . EPHEMERAL ) // Consumer临时节点
. forPath ( consumerPath + CONTEXT_SEP + encodeUrl ) ; } catch ( KeeperException . NodeExistsException nodeExistsException ) { if ( LOGGER . isWarnEnabled ( ) ) { LOGGER . warn ( "consumer has exists in zookeeper, consumer=" + url ) ; } } catch ( Exception e ) { throw new SofaRpcRuntimeException ( "Failed to register consumer to zookeeperRegistry!" , e ) ; } } |
public class WebhooksInner { /** * Gets the configuration of service URI and custom headers for the webhook .
* @ param resourceGroupName The name of the resource group to which the container registry belongs .
* @ param registryName The name of the container registry .
* @ param webhookName The name of the webhook .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the CallbackConfigInner object */
public Observable < CallbackConfigInner > getCallbackConfigAsync ( String resourceGroupName , String registryName , String webhookName ) { } } | return getCallbackConfigWithServiceResponseAsync ( resourceGroupName , registryName , webhookName ) . map ( new Func1 < ServiceResponse < CallbackConfigInner > , CallbackConfigInner > ( ) { @ Override public CallbackConfigInner call ( ServiceResponse < CallbackConfigInner > response ) { return response . body ( ) ; } } ) ; |
public class AbstractConnectorServlet { /** * Processing a new request from ElFinder client .
* @ param request
* @ param response */
protected void processRequest ( HttpServletRequest request , HttpServletResponse response ) { } } | parseRequest ( request , response ) ; if ( profile == null ) { profile = _profileService . findProfile ( request , "jbpm" ) ; } Repository repository = profile . getRepository ( ) ; if ( ! initialized ) { try { initializeDefaultRepo ( profile , repository , request ) ; initialized = true ; } catch ( Exception e ) { logger . error ( "Unable to initialize repository: " + e . getMessage ( ) ) ; } } JSONObject returnJson = new JSONObject ( ) ; try { Iterator < String > keys = requestParams . keySet ( ) . iterator ( ) ; while ( keys . hasNext ( ) ) { String key = keys . next ( ) ; } String cmd = ( String ) requestParams . get ( "cmd" ) ; if ( cmd != null && cmd . equals ( "open" ) ) { OpenCommand command = new OpenCommand ( ) ; command . init ( request , response , profile , repository , requestParams ) ; output ( response , false , command . execute ( ) ) ; } else if ( cmd != null && cmd . equals ( "mkdir" ) ) { MakeDirCommand command = new MakeDirCommand ( ) ; command . init ( request , response , profile , repository , requestParams ) ; output ( response , false , command . execute ( ) ) ; } else if ( cmd != null && cmd . equals ( "mkfile" ) ) { MakeFileCommand command = new MakeFileCommand ( ) ; command . init ( request , response , profile , repository , requestParams ) ; output ( response , false , command . execute ( ) ) ; } else if ( cmd != null && cmd . equals ( "rm" ) ) { RemoveAssetCommand command = new RemoveAssetCommand ( ) ; command . init ( request , response , profile , repository , requestParams ) ; output ( response , false , command . execute ( ) ) ; } else if ( cmd != null && cmd . equals ( "rename" ) ) { RenameCommand command = new RenameCommand ( ) ; command . init ( request , response , profile , repository , requestParams ) ; output ( response , false , command . execute ( ) ) ; } else if ( cmd != null && cmd . 
equals ( "paste" ) ) { PasteCommand command = new PasteCommand ( ) ; command . init ( request , response , profile , repository , requestParams ) ; output ( response , false , command . execute ( ) ) ; } else if ( cmd != null && cmd . equals ( "upload" ) ) { UploadCommand command = new UploadCommand ( ) ; command . init ( request , response , profile , repository , requestParams , listFiles , listFileStreams ) ; output ( response , false , command . execute ( ) ) ; } else if ( cmd != null && cmd . equals ( "getsvg" ) ) { try { Asset asset = profile . getRepository ( ) . loadAssetFromPath ( ( String ) requestParams . get ( "current" ) ) ; if ( asset != null && asset . getAssetContent ( ) != null ) { outputPlain ( response , false , ( String ) asset . getAssetContent ( ) , "image/svg+xml" ) ; } else { outputPlain ( response , true , "<p><b>Process image not available.</p><p>You can generate the process image in the process editor.</b></p>" , null ) ; } } catch ( NoSuchFileException e ) { logger . warn ( "Error loading process image: " + e . getMessage ( ) ) ; outputPlain ( response , true , "<p><b>Could not find process image.</p><p>You can generate the process image in the process editor.</b></p>" , null ) ; } } } catch ( Exception e ) { e . printStackTrace ( ) ; logger . error ( e . getMessage ( ) ) ; putResponse ( returnJson , "error" , e . getMessage ( ) ) ; // output the error
try { output ( response , false , returnJson ) ; } catch ( Exception ee ) { logger . error ( "" , ee ) ; } } |
public class AnnotationEventHandler { /** * Call the method into the callback object .
* @ param methodName the method name to call
* @ param event the event to trigger */
private void callMethod ( final String methodName , final Event event ) { } } | final Class < ? > ctrlClass = this . callbackObject . getClass ( ) ; try { final Method method = ctrlClass . getDeclaredMethod ( methodName , event . getClass ( ) ) ; ClassUtility . callMethod ( method , this . callbackObject , event ) ; } catch ( NoSuchMethodException | SecurityException | IllegalArgumentException | CoreException e ) { LOGGER . log ( EVENT_HANDLING_IMPOSSIBLE , e ) ; } |
public class PhaseFourImpl { /** * { @ inheritDoc } */
@ Override public void stage3CreateKAMstore ( final DBConnection db , String schemaName ) throws CreateKAMFailure { } } | if ( db == null ) { throw new InvalidArgument ( "db" , db ) ; } try { ksss . setupKAMStoreSchema ( db , schemaName ) ; } catch ( IOException e ) { throw new CreateKAMFailure ( db , e . getMessage ( ) ) ; } catch ( SQLException e ) { throw new CreateKAMFailure ( db , e . getMessage ( ) ) ; } |
public class DBFKRelationPropertySheet { /** * GEN - LAST : event _ formComponentShown */
public void setModel ( PropertySheetModel pm ) { } } | if ( pm instanceof org . apache . ojb . tools . mapping . reversedb . DBFKRelation ) { this . aRelation = ( org . apache . ojb . tools . mapping . reversedb . DBFKRelation ) pm ; this . readValuesFromReference ( ) ; } else throw new IllegalArgumentException ( ) ; |
public class SqlExporter { /** * Creates insert query field name */
private < T > String format ( final T t , final IClassContainer container ) { } } | final List < ExportContainer > exportContainers = extractExportContainers ( t , container ) ; final String resultValues = exportContainers . stream ( ) . map ( c -> convertFieldValue ( container . getField ( c . getExportName ( ) ) , c ) ) . collect ( Collectors . joining ( ", " ) ) ; return "(" + resultValues + ")" ; |
public class Constraint { /** * Creates a new map representing a { @ link Pattern } validation constraint .
* @ param regex a regular expression
* @ return a map */
static Map < String , Object > patternPayload ( final Object regex ) { } } | if ( regex == null ) { return null ; } Map < String , Object > payload = new LinkedHashMap < > ( ) ; payload . put ( "value" , regex ) ; payload . put ( "message" , MSG_PREFIX + VALIDATORS . get ( Pattern . class ) ) ; return payload ; |
public class SuppressionHandler { /** * Processes field members that have been collected during the characters
* and startElement method to construct a PropertyType object .
* @ return a PropertyType object */
private PropertyType processPropertyType ( ) { } } | final PropertyType pt = new PropertyType ( ) ; pt . setValue ( currentText . toString ( ) ) ; if ( currentAttributes != null && currentAttributes . getLength ( ) > 0 ) { final String regex = currentAttributes . getValue ( "regex" ) ; if ( regex != null ) { pt . setRegex ( Boolean . parseBoolean ( regex ) ) ; } final String caseSensitive = currentAttributes . getValue ( "caseSensitive" ) ; if ( caseSensitive != null ) { pt . setCaseSensitive ( Boolean . parseBoolean ( caseSensitive ) ) ; } } return pt ; |
public class DomainsInner { /** * Creates an ownership identifier for a domain or updates identifier details for an existing identifer .
* Creates an ownership identifier for a domain or updates identifier details for an existing identifer .
* @ param resourceGroupName Name of the resource group to which the resource belongs .
* @ param domainName Name of domain .
* @ param name Name of identifier .
* @ param domainOwnershipIdentifier A JSON representation of the domain ownership properties .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the DomainOwnershipIdentifierInner object */
public Observable < DomainOwnershipIdentifierInner > updateOwnershipIdentifierAsync ( String resourceGroupName , String domainName , String name , DomainOwnershipIdentifierInner domainOwnershipIdentifier ) { } } | return updateOwnershipIdentifierWithServiceResponseAsync ( resourceGroupName , domainName , name , domainOwnershipIdentifier ) . map ( new Func1 < ServiceResponse < DomainOwnershipIdentifierInner > , DomainOwnershipIdentifierInner > ( ) { @ Override public DomainOwnershipIdentifierInner call ( ServiceResponse < DomainOwnershipIdentifierInner > response ) { return response . body ( ) ; } } ) ; |
public class ParquetRecordReader {

    /**
     * Moves the reading position to the given block and seeks to and reads the given record.
     *
     * A (block, recordInBlock) pair of (-1, -1) is the sentinel for a fully consumed split:
     * every counter is fast-forwarded to the end and nothing is read.
     *
     * @param block The block (row group) to seek to.
     * @param recordInBlock The number of the record in the block to return next.
     * @throws IOException if skipping or reading a row group fails
     */
    public void seek(long block, long recordInBlock) throws IOException {
        List<BlockMetaData> blockMetaData = reader.getRowGroups();
        if (block == -1L && recordInBlock == -1L) {
            // the split was fully consumed
            currentBlock = blockMetaData.size() - 1;
            numReadRecords = numTotalRecords;
            numRecordsUpToCurrentBlock = numTotalRecords;
            return;
        }
        // init all counters for the start of the first block
        currentBlock = 0;
        numRecordsUpToPreviousBlock = 0;
        numRecordsUpToCurrentBlock = blockMetaData.get(0).getRowCount();
        numReadRecords = 0;
        // seek to the given block: skip whole row groups while keeping the
        // cumulative record counters in sync with the skipped groups
        while (currentBlock < block) {
            currentBlock++;
            reader.skipNextRowGroup();
            numRecordsUpToPreviousBlock = numRecordsUpToCurrentBlock;
            numRecordsUpToCurrentBlock += blockMetaData.get(currentBlock).getRowCount();
            numReadRecords = numRecordsUpToPreviousBlock;
        }
        // seek to and read the given record
        // NOTE(review): the <= bound consumes recordInBlock + 1 records — confirm this
        // matches the "record to return next" contract stated in the javadoc.
        PageReadStore pages = reader.readNextRowGroup();
        recordReader = createRecordReader(pages);
        for (int i = 0; i <= recordInBlock; i++) {
            readNextRecord();
        }
    }
}
public class StringContext {

    /**
     * Replaces the first match of the given regular expression pattern in the subject
     * with the given replacement. Only the first match is replaced.
     *
     * @param subject The value to replace
     * @param regex The regular expression pattern to match
     * @param replacement The replacement value
     * @return A new string based on the replacement of the expression
     * @see String#replaceFirst(String, String)
     */
    public String replaceFirstRegex(String subject, String regex, String replacement) {
        // Direct delegation: same exception behavior as the JDK method
        // (NPE on null subject, PatternSyntaxException on an invalid regex).
        return subject.replaceFirst(regex, replacement);
    }
}
public class ISO639Converter { /** * Converts a two or three digit ISO - 639 language code into a human readable name for the language represented by
* the code .
* @ param aCode A two or three digit ISO - 639 code
* @ return An English name for the language represented by the two or three digit code */
public static String convert ( final String aCode ) { } } | String langName ; switch ( aCode . length ( ) ) { case 2 : langName = ISO639_1_MAP . get ( aCode . toLowerCase ( ) ) ; break ; case 3 : langName = ISO639_2_MAP . get ( aCode . toLowerCase ( ) ) ; break ; default : langName = aCode ; } return langName == null ? aCode : langName ; |
public class BitmapContainer { /** * Counts how many runs there is in the bitmap , up to a maximum
* @ param mustNotExceed maximum of runs beyond which counting is pointless
* @ return estimated number of courses */
public int numberOfRunsLowerBound ( int mustNotExceed ) { } } | int numRuns = 0 ; for ( int blockOffset = 0 ; blockOffset < bitmap . length ; blockOffset += BLOCKSIZE ) { for ( int i = blockOffset ; i < blockOffset + BLOCKSIZE ; i ++ ) { long word = bitmap [ i ] ; numRuns += Long . bitCount ( ( ~ word ) & ( word << 1 ) ) ; } if ( numRuns > mustNotExceed ) { return numRuns ; } } return numRuns ; |
public class SqlParserImpl { /** * Parse an END node . */
protected void parseEnd ( ) { } } | while ( TokenType . EOF != tokenizer . next ( ) ) { if ( tokenizer . getTokenType ( ) == TokenType . COMMENT && isEndComment ( tokenizer . getToken ( ) ) ) { pop ( ) ; return ; } parseToken ( ) ; } throw new TwoWaySQLException ( String . format ( "END comment of %s not found." , tokenizer . getSql ( ) ) ) ; |
public class FailoverGroupsInner { /** * Creates or updates a failover group .
* @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
* @ param serverName The name of the server containing the failover group .
* @ param failoverGroupName The name of the failover group .
* @ param parameters The failover group parameters .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < FailoverGroupInner > createOrUpdateAsync ( String resourceGroupName , String serverName , String failoverGroupName , FailoverGroupInner parameters , final ServiceCallback < FailoverGroupInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( createOrUpdateWithServiceResponseAsync ( resourceGroupName , serverName , failoverGroupName , parameters ) , serviceCallback ) ; |
public class Matcher { /** * Replaces the first match this Matcher can find with replacement , as interpreted by PerlSubstitution ( so $ 1 refers
* to the first group and so on ) . Advances the search position for this Matcher , so it can also be used to
* repeatedly replace the next match when called successively .
* @ param replacement the String to replace the first match with
* @ return this Matcher ' s String it operated on , after a replacement */
public String replaceFirst ( String replacement ) { } } | TextBuffer tb = wrap ( new StringBuilder ( data . length ) ) ; Replacer . replace ( this , new PerlSubstitution ( replacement ) , tb , 1 ) ; return tb . toString ( ) ; |
public class MathUtils {

    /**
     * Rounds the specified floating point value to the nearest integer and adds it to
     * the specified collection of integers, provided it is not already present.
     *
     * @param result The collection of integers to add to.
     * @param value The new candidate to round and add.
     */
    private static void roundAndAdd(Collection<Integer> result, double value) {
        final int rounded = (int) Math.round(value);
        // Only add distinct rounded values.
        if (!result.contains(rounded)) {
            result.add(rounded);
        }
    }
}
public class SubCommandMetaGet {

    /**
     * Prints the help menu for the "meta get" command: name, synopsis, the list of
     * valid metadata keys, and the auto-generated option descriptions.
     *
     * @param stream PrintStream object for output
     * @throws IOException if writing the parser help to the stream fails
     */
    public static void printHelp(PrintStream stream) throws IOException {
        stream.println();
        stream.println("NAME");
        stream.println(" meta get - Get metadata from nodes");
        stream.println();
        stream.println("SYNOPSIS");
        stream.println(" meta get (<meta-key-list> | all) -u <url> [-d <output-dir>]");
        stream.println(" [-n <node-id-list> | --all-nodes] [--verbose]");
        stream.println();
        stream.println("COMMENTS");
        stream.println(" Valid meta keys are:");
        // Enumerate every metadata key the store knows about.
        for (Object key : MetadataStore.METADATA_KEYS) {
            stream.println(" " + (String) key);
        }
        stream.println();
        // Append the option descriptions generated by the argument parser.
        getParser().printHelpOn(stream);
        stream.println();
    }
}
public class VariantToProtoVcfRecord { /** * Encodes the { @ link String } value of the Quality into a float .
* See { @ link VcfRecordProtoToVariantConverter # getQuality ( float ) }
* Increments one to the quality value . 0 means missing or unknown .
* @ param value String quality value
* @ return Quality */
static float encodeQuality ( String value ) { } } | final float qual ; if ( StringUtils . isEmpty ( value ) || value . equals ( "." ) ) { qual = MISSING_QUAL_VALUE ; } else { qual = Float . parseFloat ( value ) ; } return qual + 1 ; |
public class Sign { /** * Given an arbitrary piece of text and an Ethereum message signature encoded in bytes ,
* returns the public key that was used to sign it . This can then be compared to the expected
* public key to determine if the signature was correct .
* @ param message RLP encoded message .
* @ param signatureData The message signature components
* @ return the public key used to sign the message
* @ throws SignatureException If the public key could not be recovered or if there was a
* signature format error . */
public static BigInteger signedMessageToKey ( byte [ ] message , SignatureData signatureData ) throws SignatureException { } } | return signedMessageHashToKey ( Hash . sha3 ( message ) , signatureData ) ; |
public class CreateConditionalForwarderRequestMarshaller {

    /**
     * Marshalls the given request object's fields (directory id, remote domain name,
     * DNS IP addresses) through the protocol marshaller using their bindings.
     *
     * @param createConditionalForwarderRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving the field values
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(CreateConditionalForwarderRequest createConditionalForwarderRequest, ProtocolMarshaller protocolMarshaller) {
        if (createConditionalForwarderRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(createConditionalForwarderRequest.getDirectoryId(), DIRECTORYID_BINDING);
            protocolMarshaller.marshall(createConditionalForwarderRequest.getRemoteDomainName(), REMOTEDOMAINNAME_BINDING);
            protocolMarshaller.marshall(createConditionalForwarderRequest.getDnsIpAddrs(), DNSIPADDRS_BINDING);
        } catch (Exception e) {
            // Any failure is wrapped, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class UnicodeFilter {

    /**
     * Default implementation of UnicodeMatcher::matches() for Unicode filters.
     * Matches a single code point at offset[0], advancing or retreating the offset
     * depending on the match direction (offset relative to limit).
     */
    @Override
    public int matches(Replaceable text, int[] offset, int limit, boolean incremental) {
        int c;
        // Forward match: offset before limit — advance past the matched code point.
        if (offset[0] < limit && contains(c = text.char32At(offset[0]))) {
            offset[0] += UTF16.getCharCount(c);
            return U_MATCH;
        }
        // Reverse match: offset after limit — retreat before the matched code point.
        if (offset[0] > limit && contains(text.char32At(offset[0]))) {
            // Backup offset by 1, unless the preceding character is a
            // surrogate pair -- then backup by 2 (keep offset pointing at
            // the lead surrogate).
            --offset[0];
            if (offset[0] >= 0) {
                offset[0] -= UTF16.getCharCount(text.char32At(offset[0])) - 1;
            }
            return U_MATCH;
        }
        // At the limit with input possibly incomplete: only a partial match so far.
        if (incremental && offset[0] == limit) {
            return U_PARTIAL_MATCH;
        }
        return U_MISMATCH;
    }
}
public class ServerRequestCreateUrl { /** * Calls the callback with the URL . This should be called on finding an existing url
* up on trying to create a URL asynchronously
* @ param url existing url with for the given data */
public void onUrlAvailable ( String url ) { } } | if ( callback_ != null ) { callback_ . onLinkCreate ( url , null ) ; } updateShareEventToFabric ( url ) ; |
public class CSSModuleBuilder {

    /**
     * Initializes Rhino with PostCSS and configured plugins. The minifier plugin is
     * always added; additional plugins are added based on configuration.
     * See {@link CSSModuleBuilder} for a description of the plugin config JavaScript.
     *
     * Also builds a pool of sealed, per-thread Rhino scopes (sized by configuration or
     * the constructor override), created in parallel on an executor.
     *
     * @param config The config object
     * @throws IllegalArgumentException if any of the config parameters are not valid
     * @throws RuntimeException for any other exception caught within this module
     */
    protected void initPostcss(IConfig config) {
        final String sourceMethod = "initPostcss"; //$NON-NLS-1$
        final boolean isTraceLogging = log.isLoggable(Level.FINER);
        if (isTraceLogging) {
            log.entering(sourceClass, sourceMethod, new Object[]{config});
        }
        pluginInfoList = new ArrayList<PluginInfo>();
        Context cx = Context.enter();
        try {
            Scriptable configScript = (Scriptable) config.getRawConfig();
            final Scriptable configScope = (Scriptable) config.getConfigScope();
            int scopePoolSize = DEFAULT_SCOPE_POOL_SIZE;
            // Constructor-provided pool size takes precedence over the config value.
            if (ctorScopePoolSize != 0) {
                scopePoolSize = ctorScopePoolSize;
            } else {
                // Read the scope pool size if specified
                Object scopePoolSizeConfig = configScript.get(SCOPEPOOLSIZE_CONFIGPARAM, configScript);
                if (scopePoolSizeConfig != Scriptable.NOT_FOUND) {
                    scopePoolSize = ((Number) scopePoolSizeConfig).intValue();
                }
            }
            // Create a new scope to evaluate the minifier initialization code because configScope is sealed.
            Scriptable scope = cx.newObject(configScope);
            scope.setParentScope(configScope);
            minifierInitScript.exec(cx, scope);
            pluginInfoList.add(new PluginInfo((Function) scope.get(MINIFIER_INITIALIZATION_VAR, scope), minifyJsScript));
            /*
             * Now load and initialize configured plugins and add them to the plugin array
             */
            Object postcssConfigObj = configScript.get(POSTCSS_CONFIGPARAM, configScript);
            if (postcssConfigObj != Scriptable.NOT_FOUND) {
                Scriptable postcssConfig = (Scriptable) postcssConfigObj;
                // Process any specified plugin configs
                Object pluginsConfigObj = postcssConfig.get(PLUGINS, postcssConfig);
                if (pluginsConfigObj != Scriptable.NOT_FOUND) {
                    Scriptable pluginsConfig = (Scriptable) pluginsConfigObj;
                    Object[] ids = pluginsConfig.getIds();
                    for (int i = 0; i < ids.length; i++) {
                        if (!(ids[i] instanceof Number)) {
                            // ignore named properties
                            continue;
                        }
                        Object pluginInfoObj = pluginsConfig.get(i, configScope);
                        if (pluginInfoObj instanceof Scriptable) {
                            if (isTraceLogging) {
                                log.logp(Level.FINER, sourceClass, sourceMethod, "Processing plugin config " + Context.toString(pluginInfoObj)); //$NON-NLS-1$
                            }
                            // A plugin entry is [location, initializerFunction].
                            Scriptable pluginInfo = (Scriptable) pluginInfoObj;
                            Object locationObj = pluginInfo.get(0, configScope);
                            if (!(locationObj instanceof String)) {
                                throw new IllegalArgumentException(Context.toString(pluginInfo));
                            }
                            String location = (String) locationObj;
                            URI uri = URI.create((String) location);
                            if (uri == null) {
                                throw new IllegalArgumentException(Context.toString(pluginInfo));
                            }
                            Object initializerObj = pluginInfo.get(1, configScope);
                            if (!(initializerObj instanceof Function)) {
                                throw new IllegalArgumentException(Context.toString(pluginInfo));
                            }
                            Function initializer = (Function) initializerObj;
                            // Relative locations are resolved against the module resources.
                            if (!uri.isAbsolute()) {
                                uri = config.locateModuleResource(location, true);
                            }
                            InputStream is = aggregator.newResource(uri).getInputStream();
                            String js;
                            try {
                                js = IOUtils.toString(is, UTF8_CHARSET);
                            } catch (JavaScriptException e) {
                                // Add module info
                                // NOTE(review): IOUtils.toString throws IOException, not
                                // JavaScriptException — confirm this catch is reachable.
                                String message = "Error evaluating or initializing plugin " + location + "\r\n" + e.getMessage(); //$NON-NLS-1$ //$NON-NLS-2$
                                throw new RuntimeException(message, e);
                            } finally {
                                IOUtils.closeQuietly(is);
                            }
                            Script script = null;
                            cx.setOptimizationLevel(9);
                            try {
                                script = cx.compileString(js, uri.toString(), 1, null);
                            } catch (EvaluatorException e) {
                                // Try with optimization disabled
                                cx.setOptimizationLevel(-1);
                                script = cx.compileString(js, uri.toString(), 1, null);
                            }
                            pluginInfoList.add(new PluginInfo(initializer, script));
                        }
                    }
                }
            }
            // Create the thread scope pool
            threadScopes = new ArrayBlockingQueue<Scriptable>(scopePoolSize);
            // Now create the thread scope pool. We use a thread pool executor service to
            // create the scope pool in order to take advantage of parallel processing
            // capabilities on multi-core processors.
            if (es == null) {
                es = Executors.newFixedThreadPool(INITIALIZER_THREAD_POOL_SIZE);
            }
            final CompletionService<Scriptable> cs = new ExecutorCompletionService<Scriptable>(es);
            for (int i = 0; i < scopePoolSize; i++) {
                cs.submit(new Callable<Scriptable>() {
                    @Override
                    public Scriptable call() throws Exception {
                        Context ctx = Context.enter();
                        try {
                            return createThreadScope(ctx, configScope);
                        } finally {
                            Context.exit();
                        }
                    }
                });
            }
            // now get the results
            for (int i = 0; i < scopePoolSize; i++) {
                Scriptable threadScope = SignalUtil.take(cs, sourceClass, sourceMethod).get();
                // Seal the scopes to prevent changes
                ((ScriptableObject) threadScope).sealObject();
                threadScopes.add(threadScope);
            }
            // Shut down the executor and release the threads
            es.shutdown();
        } catch (Exception e) {
            throw new RuntimeException(e.getMessage(), e);
        } finally {
            Context.exit();
        }
        if (isTraceLogging) {
            log.exiting(sourceClass, sourceMethod);
        }
    }
}
public class ASTProcessor { /** * Parses the provided file , using the given libraryPaths and sourcePaths as context . The libraries may be either
* jar files or references to directories containing class files .
* The sourcePaths must be a reference to the top level directory for sources ( eg , for a file
* src / main / java / org / example / Foo . java , the source path would be src / main / java ) .
* The wildcard resolver provides a fallback for processing wildcard imports that the underlying parser was unable
* to resolve . */
public static List < ClassReference > analyze ( WildcardImportResolver importResolver , Set < String > libraryPaths , Set < String > sourcePaths , Path sourceFile ) { } } | ASTParser parser = ASTParser . newParser ( AST . JLS11 ) ; parser . setEnvironment ( libraryPaths . toArray ( new String [ libraryPaths . size ( ) ] ) , sourcePaths . toArray ( new String [ sourcePaths . size ( ) ] ) , null , true ) ; parser . setBindingsRecovery ( false ) ; parser . setResolveBindings ( true ) ; Map options = JavaCore . getOptions ( ) ; JavaCore . setComplianceOptions ( JavaCore . VERSION_1_8 , options ) ; parser . setCompilerOptions ( options ) ; String fileName = sourceFile . getFileName ( ) . toString ( ) ; parser . setUnitName ( fileName ) ; try { parser . setSource ( FileUtils . readFileToString ( sourceFile . toFile ( ) ) . toCharArray ( ) ) ; } catch ( IOException e ) { throw new ASTException ( "Failed to get source for file: " + sourceFile . toString ( ) + " due to: " + e . getMessage ( ) , e ) ; } parser . setKind ( ASTParser . K_COMPILATION_UNIT ) ; CompilationUnit cu = ( CompilationUnit ) parser . createAST ( null ) ; ReferenceResolvingVisitor visitor = new ReferenceResolvingVisitor ( importResolver , cu , sourceFile . toString ( ) ) ; cu . accept ( visitor ) ; return visitor . getJavaClassReferences ( ) ; |
public class HystrixCommandExecutionHook { /** * DEPRECATED : Change usages of this to { @ link # onError } .
* Invoked after failed completion of { @ link HystrixCommand } execution .
* @ param commandInstance
* The executing HystrixCommand instance .
* @ param failureType
* { @ link FailureType } representing the type of failure that occurred .
* See { @ link HystrixRuntimeException } for more information .
* @ param e
* Exception thrown by { @ link HystrixCommand }
* @ return Exception that can be decorated , replaced or just returned as a pass - thru .
* @ since 1.2 */
@ Deprecated public < T > Exception onError ( HystrixCommand < T > commandInstance , FailureType failureType , Exception e ) { } } | // pass - thru by default
return e ; |
public class EvolutionUtils { /** * Sorts an evaluated population in descending order of fitness
* ( descending order of fitness score for natural scores , ascending
* order of scores for non - natural scores ) .
* @ param evaluatedPopulation The population to be sorted ( in - place ) .
* @ param naturalFitness True if higher fitness scores mean fitter individuals , false otherwise .
* @ param < T > The type of entity that is being evolved . */
public static < T > void sortEvaluatedPopulation ( List < EvaluatedCandidate < T > > evaluatedPopulation , boolean naturalFitness ) { } } | // Sort candidates in descending order according to fitness .
if ( naturalFitness ) // Descending values for natural fitness .
{ Collections . sort ( evaluatedPopulation , Collections . reverseOrder ( ) ) ; } else // Ascending values for non - natural fitness .
{ Collections . sort ( evaluatedPopulation ) ; } |
public class RedisSortSet { /** * 删除有序集合中的一个成员
* @ param member
* @ return */
public boolean remove ( Object mem ) { } } | try { boolean result = false ; if ( isCluster ( groupName ) ) { result = getBinaryJedisClusterCommands ( groupName ) . zrem ( keyBytes , valueSerialize ( mem ) ) >= 1 ; } else { result = getBinaryJedisCommands ( groupName ) . zrem ( keyBytes , valueSerialize ( mem ) ) >= 1 ; } return result ; } finally { getJedisProvider ( groupName ) . release ( ) ; } |
public class JSONObject { /** * A Simple Helper cast an Object to an Number
* @ return a Number or null */
public Number getAsNumber ( String key ) { } } | Object obj = this . get ( key ) ; if ( obj == null ) return null ; if ( obj instanceof Number ) return ( Number ) obj ; return Long . valueOf ( obj . toString ( ) ) ; |
public class Sneaky { /** * Sneaky throws a Predicate lambda .
* @ param predicate Predicate that can throw an exception
* @ param < T > type of first argument
* @ return a Predicate as defined in java . util . function */
public static < T , E extends Exception > Predicate < T > sneaked ( SneakyPredicate < T , E > predicate ) { } } | return t -> { @ SuppressWarnings ( "unchecked" ) SneakyPredicate < T , RuntimeException > castedSneakyPredicate = ( SneakyPredicate < T , RuntimeException > ) predicate ; return castedSneakyPredicate . test ( t ) ; } ; |
public class Artwork { /** * Serializes this artwork object to a { @ link Bundle } representation .
* @ return a serialized version of the artwork .
* @ see # fromBundle */
@ NonNull public Bundle toBundle ( ) { } } | Bundle bundle = new Bundle ( ) ; bundle . putString ( KEY_COMPONENT_NAME , ( mComponentName != null ) ? mComponentName . flattenToShortString ( ) : null ) ; bundle . putString ( KEY_IMAGE_URI , ( mImageUri != null ) ? mImageUri . toString ( ) : null ) ; bundle . putString ( KEY_TITLE , mTitle ) ; bundle . putString ( KEY_BYLINE , mByline ) ; bundle . putString ( KEY_ATTRIBUTION , mAttribution ) ; bundle . putString ( KEY_TOKEN , mToken ) ; bundle . putString ( KEY_VIEW_INTENT , ( mViewIntent != null ) ? mViewIntent . toUri ( Intent . URI_INTENT_SCHEME ) : null ) ; bundle . putString ( KEY_META_FONT , mMetaFont ) ; bundle . putLong ( KEY_DATE_ADDED , mDateAdded != null ? mDateAdded . getTime ( ) : 0 ) ; return bundle ; |
public class SimpleDocTreeVisitor { /** * { @ inheritDoc } This implementation calls { @ code defaultAction } .
* @ param node { @ inheritDoc }
* @ param p { @ inheritDoc }
* @ return the result of { @ code defaultAction } */
@ Override public R visitSerial ( SerialTree node , P p ) { } } | return defaultAction ( node , p ) ; |
public class TypeGraph {
    /**
     * Attempts to find the best {@link com.merakianalytics.datapipelines.ChainTransform} to get from one type to another.
     *
     * @param from the type to convert from
     * @param to the type to convert to
     * @return the best {@link com.merakianalytics.datapipelines.ChainTransform} between the types, or null if there is none
     */
    @SuppressWarnings("unchecked") // Generics are not the strong suit of Hipster4j.
    public ChainTransform getTransform(final Class from, final Class<?> to) {
        // Identity conversion: no transformers needed.
        if (from.equals(to)) {
            return ChainTransform.identity(to);
        }
        // Model the transformer graph as a shortest-path problem; edge cost is the transformer's declared cost.
        final SearchProblem problem = GraphSearchProblem.startingFrom(from).in(graph).extractCostFromEdges(new Function<DataTransformer, Double>() {
            @Override
            public Double apply(final DataTransformer transformer) {
                return new Double(transformer.cost());
            }
        }).build();
        // Dijkstra from 'from'; the optimal path is the list of classes visited on the way to 'to'.
        final List<Class<?>> path = (List<Class<?>>) Hipster.createDijkstra(problem).search(to).getOptimalPaths().get(0);
        // If the search could not reach 'to', the path ends elsewhere -> no transform exists.
        // NOTE(review): the null check appears unreachable after get(0); presumably defensive — confirm against Hipster4j behavior.
        if (path == null || !path.get(path.size() - 1).equals(to)) {
            return null;
        }
        // Collect the cheapest transformer for each consecutive hop along the path.
        final List<DataTransformer> transform = new LinkedList<>();
        for (int i = 1; i < path.size(); i++) {
            transform.add(cheapest.get(path.get(i - 1)).get(path.get(i)));
        }
        return new ChainTransform(from, to, path, transform);
    }
}
public class WildcardFilenameFilter { /** * / * ( non - Javadoc )
* @ see java . io . FilenameFilter # accept ( java . io . File , java . lang . String ) */
public boolean accept ( File dir , String name ) { } } | for ( List < String > pattern : _patterns ) { boolean match = match ( pattern , name ) ; if ( match ) return ! _exclude ; } return _exclude ; |
public class BaseScope { /** * Iterates over the entries of the specified map calling potentially registered
* { @ link Disposable } s .
* @ param scopeMap
* the scope map */
protected void performDisposal ( final Map < Key < ? > , Object > scopeMap ) { } } | for ( Entry < Key < ? > , Object > entry : scopeMap . entrySet ( ) ) { Key < ? > key = entry . getKey ( ) ; // warning can be safely suppressed , we always get a Disposable and
// the type parameter < Object > does not hurt here at runtime
@ SuppressWarnings ( "unchecked" ) Disposable < Object > disposable = ( Disposable < Object > ) disposables . get ( key ) ; if ( disposable != null ) { disposable . dispose ( entry . getValue ( ) ) ; } } |
public class FessMessages { /** * Add the created action message for the key ' success . changed _ password ' with parameters .
* < pre >
* message : Changed your password .
* < / pre >
* @ param property The property name for the message . ( NotNull )
* @ return this . ( NotNull ) */
public FessMessages addSuccessChangedPassword ( String property ) { } } | assertPropertyNotNull ( property ) ; add ( property , new UserMessage ( SUCCESS_changed_password ) ) ; return this ; |
public class SessionDriver { /** * Stops session acquired by remote JT
* @ param remoteId id of remote JT session */
public void stopRemoteSession ( String remoteId ) { } } | cmNotifier . addCall ( new ClusterManagerService . sessionEnd_args ( remoteId , SessionStatus . TIMED_OUT ) ) ; |
public class Strings { /** * 将1-2,3,4-9之类的序列拆分成数组
* @ param numSeq
* a { @ link java . lang . String } object .
* @ return an array of { @ link java . lang . Integer } objects . */
public static Integer [ ] splitNumSeq ( final String numSeq ) { } } | if ( isEmpty ( numSeq ) ) { return null ; } String [ ] numArray = split ( numSeq , ',' ) ; Set < Integer > numSet = CollectUtils . newHashSet ( ) ; for ( int i = 0 ; i < numArray . length ; i ++ ) { String num = numArray [ i ] ; if ( num . contains ( "-" ) ) { String [ ] termFromTo = split ( num , '-' ) ; int from = Numbers . toInt ( termFromTo [ 0 ] ) ; int to = Numbers . toInt ( termFromTo [ 1 ] ) ; for ( int j = from ; j <= to ; j ++ ) { numSet . add ( Integer . valueOf ( j ) ) ; } } else { numSet . add ( new Integer ( num ) ) ; } } Integer [ ] nums = new Integer [ numSet . size ( ) ] ; numSet . toArray ( nums ) ; return nums ; |
public class DefaultGroovyMethods { /** * Finds all values matching the closure condition .
* < pre class = " groovyTestCase " > assert ( [ 2,4 ] as Set ) = = ( [ 1,2,3,4 ] as Set ) . findAll { it % 2 = = 0 } < / pre >
* @ param self a Set
* @ param closure a closure condition
* @ return a Set of matching values
* @ since 2.4.0 */
public static < T > Set < T > findAll ( Set < T > self , @ ClosureParams ( FirstParam . FirstGenericType . class ) Closure closure ) { } } | return ( Set < T > ) findAll ( ( Collection < T > ) self , closure ) ; |
public class AwsSecurityFindingFilters { /** * Specifies the type of the resource for which details are provided .
* @ param resourceType
* Specifies the type of the resource for which details are provided . */
public void setResourceType ( java . util . Collection < StringFilter > resourceType ) { } } | if ( resourceType == null ) { this . resourceType = null ; return ; } this . resourceType = new java . util . ArrayList < StringFilter > ( resourceType ) ; |
public class AmazonRoute53Client { /** * Gets information about all of the versions for a specified traffic policy .
* Traffic policy versions are listed in numerical order by < code > VersionNumber < / code > .
* @ param listTrafficPolicyVersionsRequest
* A complex type that contains the information about the request to list your traffic policies .
* @ return Result of the ListTrafficPolicyVersions operation returned by the service .
* @ throws InvalidInputException
* The input is not valid .
* @ throws NoSuchTrafficPolicyException
* No traffic policy exists with the specified ID .
* @ sample AmazonRoute53 . ListTrafficPolicyVersions
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / route53-2013-04-01 / ListTrafficPolicyVersions "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public ListTrafficPolicyVersionsResult listTrafficPolicyVersions ( ListTrafficPolicyVersionsRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeListTrafficPolicyVersions ( request ) ; |
public class WapitiCRFModel {
    /**
     * Loads the feature-tag conversion table from the model stream.
     * The first TAG_NUM slots map single tags (S/B/M/E) to their Config ids;
     * the remaining TAG_NUM*TAG_NUM slots encode tag-bigram transitions.
     *
     * @param br reader positioned at the tag section of the model file
     * @return the conversion table (size TAG_NUM + TAG_NUM^2)
     * @throws Exception if an unknown tag character is encountered
     */
    private int[] loadTagCoven(BufferedReader br) throws Exception {
        int[] conver = new int[Config.TAG_NUM + Config.TAG_NUM * Config.TAG_NUM];
        String temp = br.readLine(); // section header, e.g. "#qrk#4"
        // TODO: this mapping is hard-coded; it must be rewritten if the tag set changes.
        for (int i = 0; i < Config.TAG_NUM; i++) {
            // Each line looks like "<index>:<tag>"; take the tag character.
            char c = br.readLine().split(":")[1].charAt(0);
            switch (c) {
                case 'S':
                    conver[i] = Config.S;
                    break;
                case 'B':
                    conver[i] = Config.B;
                    break;
                case 'M':
                    conver[i] = Config.M;
                    break;
                case 'E':
                    conver[i] = Config.E;
                    break;
                default:
                    throw new Exception("err tag named " + c + " in model " + temp);
            }
        }
        // Derive bigram entries from the unigram mapping.
        // NOTE(review): the literal 4 looks like it assumes TAG_NUM == 4 — confirm.
        for (int i = Config.TAG_NUM; i < conver.length; i++) {
            conver[i] = conver[(i - 4) / Config.TAG_NUM] * Config.TAG_NUM + conver[i % Config.TAG_NUM] + Config.TAG_NUM;
        }
        return conver;
    }
}
import java.util.ArrayList;
import java.util.Collections;

public class AlternateExtremeSort {
    /**
     * Rearranges the list of integers in alternation of smallest and largest values,
     * starting with the smallest: lowest value first, then the highest of the
     * remaining numbers, and so on.
     *
     * Examples:
     *   alternateExtremeSort([1, 2, 3, 4]) == [1, 4, 2, 3]
     *   alternateExtremeSort([5, 5, 5, 5]) == [5, 5, 5, 5]
     *   alternateExtremeSort([])           == []
     *
     * @param inputList a list of integers; left unmodified
     * @return a new list sorted in the alternate-extreme order
     */
    public static ArrayList<Integer> alternateExtremeSort(ArrayList<Integer> inputList) {
        // FIX: the previous version destructively emptied the caller's list and
        // rescanned min/max on every step (O(n^2)). Sort a copy once and walk it
        // from both ends instead: O(n log n), no side effects on the input.
        final ArrayList<Integer> sorted = new ArrayList<Integer>(inputList);
        Collections.sort(sorted);

        final ArrayList<Integer> result = new ArrayList<Integer>(sorted.size());
        int lo = 0;                     // next-smallest remaining value
        int hi = sorted.size() - 1;     // next-largest remaining value
        while (lo <= hi) {
            result.add(sorted.get(lo++));
            if (lo <= hi) {             // guard against double-adding the middle element
                result.add(sorted.get(hi--));
            }
        }
        return result;
    }
}
public class ForecastBreakdown {
    /**
     * Gets the startTime value for this ForecastBreakdown.
     *
     * @return startTime the starting time of the represented breakdown
     */
    public com.google.api.ads.admanager.axis.v201811.DateTime getStartTime() {
        return startTime;
    }
}
public class NtlmSsp { /** * Calls the static { @ link # authenticate ( CIFSContext , HttpServletRequest ,
* HttpServletResponse , byte [ ] ) } method to perform NTLM authentication
* for the specified servlet request .
* @ param tc
* @ param req
* The request being serviced .
* @ param resp
* The response .
* @ param challenge
* The domain controller challenge .
* @ return credentials passed in the servlet request
* @ throws IOException
* If an IO error occurs . */
public NtlmPasswordAuthentication doAuthentication ( CIFSContext tc , HttpServletRequest req , HttpServletResponse resp , byte [ ] challenge ) throws IOException { } } | return authenticate ( tc , req , resp , challenge ) ; |
public class RadioConverter { /** * Constructor .
* @ param converter The next converter in the converter chain .
* @ param strTarget If the radio button is set , set this converter to this target string .
* @ param bTrueIfMatch If true , sets value on setState ( true ) , otherwise sets it to blank . */
public void init ( Converter converter , Object objTarget , boolean bTrueIfMatch ) { } } | super . init ( converter , null , null ) ; m_objTarget = objTarget ; m_bTrueIfMatch = bTrueIfMatch ; |
public class SSLWriteServiceContext { /** * See method above . Extra parameter tells if the request was from a formerly queued request .
* @ param _ numBytes
* @ param userCallback
* @ param forceQueue
* @ param timeout
* @ param fromQueue
* @ return VirtualConnection */
protected VirtualConnection write ( long _numBytes , TCPWriteCompletedCallback userCallback , boolean forceQueue , int timeout , boolean fromQueue ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . entry ( tc , "writeAsync, numBytes=" + _numBytes + ", timeout=" + timeout + ", fromQueue=" + fromQueue + ", vc=" + getVCHash ( ) ) ; } VirtualConnection vc = null ; try { long numBytes = _numBytes ; // Handle timing out of former read request .
if ( timeout == IMMED_TIMEOUT || timeout == ABORT_TIMEOUT ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Requested to timeout former request. Calling device side." ) ; } getConnLink ( ) . getDeviceWriteInterface ( ) . write ( numBytes , this , forceQueue , timeout ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . exit ( tc , "writeAsync: " + getVC ( ) ) ; } return getVC ( ) ; } // Look for errors in the request .
IOException exceptionInRequest = checkForErrors ( numBytes , true ) ; if ( exceptionInRequest != null ) { // Found an error .
boolean fireHere = true ; if ( forceQueue ) { // Error must be returned on a separate thread .
queuedWork . setErrorParameters ( getConnLink ( ) . getVirtualConnection ( ) , this , userCallback , exceptionInRequest ) ; EventEngine events = SSLChannelProvider . getEventService ( ) ; if ( null == events ) { Exception e = new Exception ( "missing event admin" ) ; FFDCFilter . processException ( e , getClass ( ) . getName ( ) , "172" , this ) ; // fall - thru below and use callback here regardless
} else { // fire an event to continue this queued work
Event event = events . createEvent ( SSLEventHandler . TOPIC_QUEUED_WORK ) ; event . setProperty ( SSLEventHandler . KEY_RUNNABLE , this . queuedWork ) ; events . postEvent ( event ) ; fireHere = false ; } } if ( fireHere ) { // Call the callback on this thread .
userCallback . error ( getConnLink ( ) . getVirtualConnection ( ) , this , exceptionInRequest ) ; } // Return null indicating that the callback will handle the response .
return null ; } // Get the work on another thread if queuing is being forced .
if ( forceQueue ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Forcing write request to another thread, vc=" + getVCHash ( ) ) ; } queuedWork . setWriteParameters ( numBytes , userCallback , timeout ) ; EventEngine events = SSLChannelProvider . getEventService ( ) ; if ( null == events ) { IOException e = new IOException ( "missing event admin" ) ; FFDCFilter . processException ( e , getClass ( ) . getName ( ) , "471" , this ) ; userCallback . error ( getConnLink ( ) . getVirtualConnection ( ) , this , e ) ; } else { // fire an event to continue this queued work
Event event = events . createEvent ( SSLEventHandler . TOPIC_QUEUED_WORK ) ; event . setProperty ( SSLEventHandler . KEY_RUNNABLE , this . queuedWork ) ; events . postEvent ( event ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . exit ( tc , "writeAsync: null" ) ; } return null ; } callback = userCallback ; // Adjust size of numBytes if all data is requested to be written ( - 1 ) .
if ( numBytes == TCPWriteRequestContext . WRITE_ALL_DATA ) { numBytes = WsByteBufferUtils . lengthOf ( getBuffers ( ) ) ; } SSLEngineResult sslResult = null ; // Check if a handshake is needed .
if ( SSLUtils . isHandshaking ( getConnLink ( ) . getSSLEngine ( ) ) ) { // A handshake is needed . Set the write parameters .
handshakeCallback . setWriteParameters ( numBytes , timeout ) ; try { sslResult = doHandshake ( handshakeCallback ) ; } catch ( IOException e ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Caught exception during SSL handshake, " + e ) ; } callback . error ( getConnLink ( ) . getVirtualConnection ( ) , this , e ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . exit ( tc , "writeAsynch: null" ) ; } return null ; } // Check to see if handshake was done synchronously .
if ( sslResult != null ) { // Handshake was done synchronously . Verify results .
if ( sslResult . getHandshakeStatus ( ) != HandshakeStatus . FINISHED ) { IOException e = new IOException ( "Unable to complete SSLhandshake" ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Unable to complete SSLhandshake, " + e ) ; } callback . error ( getConnLink ( ) . getVirtualConnection ( ) , this , e ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . exit ( tc , "writeAsynch: null" ) ; } return null ; } } else { // Handshake is being handled asynchronously .
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . exit ( tc , "writeAsynch: null" ) ; } return null ; } } // Code can only get here if handshake wasn ' t needed or was done sync with valid return code .
// Encrypt the data and write it to the network .
vc = encryptAndWriteAsync ( numBytes , false , timeout ) ; // If data is ready , but this was from a formerly queued request , call the callback .
if ( vc != null && fromQueue ) { callback . complete ( vc , this ) ; vc = null ; } } catch ( Exception original ) { synchronized ( closeSync ) { // if close has been called then assume this exception was due to a race condition
// with the close logic . so no FFDC here .
if ( closeCalled ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Cannot write, the link was already closed; vc=" + this . getVCHash ( ) ) ; } return null ; } else { IOException ioe ; if ( original instanceof IOException ) { ioe = ( IOException ) original ; } ioe = new IOException ( "writeAsynch failed with exception: " + original . getMessage ( ) ) ; boolean fireHere = true ; if ( forceQueue ) { // Error must be returned on a separate thread .
queuedWork . setErrorParameters ( getConnLink ( ) . getVirtualConnection ( ) , this , userCallback , ioe ) ; EventEngine events = SSLChannelProvider . getEventService ( ) ; if ( null != events ) { // fire an event to continue this queued work
Event event = events . createEvent ( SSLEventHandler . TOPIC_QUEUED_WORK ) ; event . setProperty ( SSLEventHandler . KEY_RUNNABLE , this . queuedWork ) ; events . postEvent ( event ) ; fireHere = false ; } } if ( fireHere ) { // Call the callback on this thread .
userCallback . error ( getConnLink ( ) . getVirtualConnection ( ) , this , ioe ) ; } return null ; } } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . exit ( tc , "writeAsynch: " + vc ) ; } return vc ; |
public class VirtualMachineExtensionsInner { /** * The operation to update the extension .
* @ param resourceGroupName The name of the resource group .
* @ param vmName The name of the virtual machine where the extension should be updated .
* @ param vmExtensionName The name of the virtual machine extension .
* @ param extensionParameters Parameters supplied to the Update Virtual Machine Extension operation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the VirtualMachineExtensionInner object if successful . */
public VirtualMachineExtensionInner beginUpdate ( String resourceGroupName , String vmName , String vmExtensionName , VirtualMachineExtensionUpdate extensionParameters ) { } } | return beginUpdateWithServiceResponseAsync ( resourceGroupName , vmName , vmExtensionName , extensionParameters ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class AWSDeviceFarmClient { /** * Installs an application to the device in a remote access session . For Android applications , the file must be in
* . apk format . For iOS applications , the file must be in . ipa format .
* @ param installToRemoteAccessSessionRequest
* Represents the request to install an Android application ( in . apk format ) or an iOS application ( in . ipa
* format ) as part of a remote access session .
* @ return Result of the InstallToRemoteAccessSession operation returned by the service .
* @ throws ArgumentException
* An invalid argument was specified .
* @ throws NotFoundException
* The specified entity was not found .
* @ throws LimitExceededException
* A limit was exceeded .
* @ throws ServiceAccountException
* There was a problem with the service account .
* @ sample AWSDeviceFarm . InstallToRemoteAccessSession
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / devicefarm - 2015-06-23 / InstallToRemoteAccessSession "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public InstallToRemoteAccessSessionResult installToRemoteAccessSession ( InstallToRemoteAccessSessionRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeInstallToRemoteAccessSession ( request ) ; |
public class JobChangeLog {
    /**
     * Sets the debug log-level flag for job-change logging, then locks this
     * configuration against further modification.
     *
     * @param logLevelDebug whether job changes should be logged at debug level
     */
    public static void setLogLevelDebug(boolean logLevelDebug) {
        // Fail fast if configuration has already been locked.
        assertUnlocked();
        if (logger.isInfoEnabled()) {
            logger.info("...Setting job-change logLevelDebug: " + logLevelDebug);
        }
        _logLevelDebug = logLevelDebug;
        lock(); // auto-lock here, because of deep world
    }
}
public class BackgroundCache { /** * Gets the value if currently in the cache . If not ,
* Runs the refresher immediately to obtain the result , then
* places an entry into the cache .
* @ return The result obtained from either the cache or this refresher
* @ see # get ( java . lang . Object )
* @ see # put ( java . lang . Object , com . aoindustries . cache . BackgroundCache . Refresher ) */
public Result < V , E > get ( K key , Refresher < ? super K , ? extends V , ? extends E > refresher ) { } } | Result < V , E > result = get ( key ) ; if ( result == null ) result = put ( key , refresher ) ; return result ; |
public class RequestHelper { /** * Get the full URI ( excl . protocol ) and parameters of the passed request . < br >
* Example :
* < pre >
* / mywebapp / servlet / dir / a / b . xml = 123 ? d = 789
* < / pre >
* @ param aHttpRequest
* The request to use . May not be < code > null < / code > .
* @ return The full URI .
* @ see # getURL ( HttpServletRequest ) getURL to retrieve the absolute URL */
@ Nonnull @ Nonempty public static String getURI ( @ Nonnull final HttpServletRequest aHttpRequest ) { } } | ValueEnforcer . notNull ( aHttpRequest , "HttpRequest" ) ; final String sReqUrl = getRequestURI ( aHttpRequest ) ; final String sQueryString = ServletHelper . getRequestQueryString ( aHttpRequest ) ; // d = 789 & x = y
if ( StringHelper . hasText ( sQueryString ) ) return sReqUrl + URLHelper . QUESTIONMARK + sQueryString ; return sReqUrl ; |
public class CmsSetupBean { /** * Returns html for the given component to fill the selection list . < p >
* @ param component the component to generate the code for
* @ return html code */
protected String htmlComponent ( CmsSetupComponent component ) { } } | StringBuffer html = new StringBuffer ( 256 ) ; html . append ( "\t<tr>\n" ) ; html . append ( "\t\t<td>\n" ) ; html . append ( "\t\t\t<input type='checkbox' name='availableComponents' value='" ) ; html . append ( component . getId ( ) ) ; html . append ( "'" ) ; if ( component . isChecked ( ) ) { html . append ( " checked='checked'" ) ; } html . append ( " onClick=\"checkComponentDependencies('" ) ; html . append ( component . getId ( ) ) ; html . append ( "');\">\n" ) ; html . append ( "\t\t</td>\n" ) ; html . append ( "\t\t<td style='width: 100%; '>\n\t\t\t" ) ; html . append ( component . getName ( ) ) ; html . append ( "\n\t\t</td>\n" ) ; html . append ( "\t</tr>\n" ) ; html . append ( "\t<tr>\n" ) ; html . append ( "\t\t<td> </td>\n" ) ; html . append ( "\t\t<td style='vertical-align: top; width: 100%; padding-bottom: 8px; font-style: italic;'>\n\t\t\t" ) ; html . append ( component . getDescription ( ) ) ; html . append ( "\n\t\t</td>\n" ) ; html . append ( "\t</tr>\n" ) ; return html . toString ( ) ; |
public class CmsFlexResponse {
    /**
     * This method is needed to process pages that can NOT be analyzed
     * directly during delivering (like JSP) because they write to
     * their own buffer.<p>
     *
     * In this case, we don't actually write output of include calls to the stream.
     * Where there are include calls we write a {@link #FLEX_CACHE_DELIMITER} char on the stream
     * to indicate that at this point the output of the include must be placed later.
     * The include targets (resource names) are then saved in the m_includeList.<p>
     *
     * This method must be called after the complete page has been processed.
     * It will contain the output of the page only (no includes),
     * with {@link #FLEX_CACHE_DELIMITER} chars where the include calls should be placed.
     * What we do here is analyze the output and cut it in parts
     * of <code>byte[]</code> arrays which then are saved in the resulting cache entry.
     * For the includes, we just save the name of the resource in
     * the cache entry.<p>
     *
     * If caching is disabled this method is just not called.<p>
     */
    private void processIncludeList() {
        byte[] result = getWriterBytes();
        if (!hasIncludeList()) {
            // No include list, so no includes and we just use the bytes as they are in one block.
            m_cachedEntry.add(result);
        } else {
            // Process the include list: split the page bytes at each delimiter.
            int max = result.length;
            int pos = 0;   // current scan position in the page bytes
            int last = 0;  // start of the current un-flushed content piece
            int size = 0;
            int count = 0; // number of include calls consumed in this pass
            // Work through result and split this with include list calls.
            int i = 0;
            while ((i < m_includeList.size()) && (pos < max)) {
                // Look for the first FLEX_CACHE_DELIMITER char.
                while ((pos < max) && (result[pos] != FLEX_CACHE_DELIMITER)) {
                    pos++;
                }
                if ((pos < max) && (result[pos] == FLEX_CACHE_DELIMITER)) {
                    count++;
                    // A byte value of FLEX_CACHE_DELIMITER in our (String) output list
                    // indicates that the next include call must be placed here.
                    size = pos - last;
                    if (size > 0) {
                        // If not (it might be 0) there would be 2 include calls back-to-back.
                        byte[] piece = new byte[size];
                        System.arraycopy(result, last, piece, 0, size);
                        // Add the byte array to the cache entry.
                        m_cachedEntry.add(piece);
                        piece = null;
                    }
                    last = ++pos;
                    // Add an include call to the cache entry.
                    m_cachedEntry.add(m_includeList.get(i), m_includeListParameters.get(i), m_includeListAttributes.get(i));
                    i++;
                }
            }
            if (pos < max) {
                // There is content behind the last include call.
                size = max - pos;
                byte[] piece = new byte[size];
                System.arraycopy(result, pos, piece, 0, size);
                m_cachedEntry.add(piece);
                piece = null;
            }
            if (i >= m_includeList.size()) {
                // Clear the include list if all include calls are handled.
                m_includeList = null;
                m_includeListParameters = null;
                m_includeListAttributes = null;
            } else {
                // If something is left, remove the processed entries.
                m_includeList = m_includeList.subList(count, m_includeList.size());
                m_includeListParameters = m_includeListParameters.subList(count, m_includeListParameters.size());
                m_includeListAttributes = m_includeListAttributes.subList(count, m_includeListAttributes.size());
            }
        }
    }
}
public class Metrics {
    /**
     * Generic method that posts this plugin's statistics to the metrics website.
     *
     * @param isPing true when this is a periodic ping rather than the initial report
     * @throws IOException if the HTTP request fails or the server returns an error response
     */
    private void postPlugin(final boolean isPing) throws IOException {
        String serverVersion = getFullServerVersion();
        int playersOnline = getPlayersOnline();
        // END server software specific section -- all code below does not use any code outside of this class / Java
        // Construct the post data.
        StringBuilder json = new StringBuilder(1024);
        json.append('{');
        // The plugin's core identification data: name, version, server, player count.
        appendJSONPair(json, "guid", guid);
        appendJSONPair(json, "plugin_version", pluginVersion);
        appendJSONPair(json, "server_version", serverVersion);
        appendJSONPair(json, "players_online", Integer.toString(playersOnline));
        // New data as of R6: host OS and JVM details.
        String osname = System.getProperty("os.name");
        String osarch = System.getProperty("os.arch");
        String osversion = System.getProperty("os.version");
        String java_version = System.getProperty("java.version");
        int coreCount = Runtime.getRuntime().availableProcessors();
        // Normalize os arch: amd64 -> x86_64.
        if (osarch.equals("amd64")) {
            osarch = "x86_64";
        }
        appendJSONPair(json, "osname", osname);
        appendJSONPair(json, "osarch", osarch);
        appendJSONPair(json, "osversion", osversion);
        appendJSONPair(json, "cores", Integer.toString(coreCount));
        // appendJSONPair(json, "auth_mode", onlineMode ? "1" : "0");
        appendJSONPair(json, "java_version", java_version);
        // If we're pinging, append it.
        if (isPing) {
            appendJSONPair(json, "ping", "1");
        }
        if (graphs.size() > 0) {
            // Serialize each registered graph's plotter values as a nested JSON object.
            synchronized (graphs) {
                json.append(',');
                json.append('"');
                json.append("graphs");
                json.append('"');
                json.append(':');
                json.append('{');
                boolean firstGraph = true;
                final Iterator<Graph> iter = graphs.iterator();
                while (iter.hasNext()) {
                    Graph graph = iter.next();
                    StringBuilder graphJson = new StringBuilder();
                    graphJson.append('{');
                    for (Plotter plotter : graph.getPlotters()) {
                        appendJSONPair(graphJson, plotter.getColumnName(), Integer.toString(plotter.getValue()));
                    }
                    graphJson.append('}');
                    if (!firstGraph) {
                        json.append(',');
                    }
                    json.append(escapeJSON(graph.getName()));
                    json.append(':');
                    json.append(graphJson);
                    firstGraph = false;
                }
                json.append('}');
            }
        }
        // Close json.
        json.append('}');
        // Create the url.
        URL url = new URL(BASE_URL + String.format(REPORT_URL, urlEncode(pluginName)));
        // Connect to the website.
        URLConnection connection;
        // Mineshafter creates a socks proxy, so we can safely bypass it.
        // It does not reroute POST requests so we need to go around it.
        if (isMineshafterPresent()) {
            connection = url.openConnection(Proxy.NO_PROXY);
        } else {
            connection = url.openConnection();
        }
        byte[] uncompressed = json.toString().getBytes();
        byte[] compressed = gzip(json.toString());
        // Headers: the payload is always sent gzip-compressed.
        connection.addRequestProperty("User-Agent", "MCStats/" + REVISION);
        connection.addRequestProperty("Content-Type", "application/json");
        connection.addRequestProperty("Content-Encoding", "gzip");
        connection.addRequestProperty("Content-Length", Integer.toString(compressed.length));
        connection.addRequestProperty("Accept", "application/json");
        connection.addRequestProperty("Connection", "close");
        connection.setDoOutput(true);
        if (debug) {
            System.out.println("[Metrics] Prepared request for " + pluginName + " uncompressed=" + uncompressed.length + " compressed=" + compressed.length);
        }
        // Write the data.
        OutputStream os = connection.getOutputStream();
        os.write(compressed);
        os.flush();
        // Now read the response (a single status line).
        final BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
        String response = reader.readLine();
        // Close resources.
        // NOTE(review): these are not closed on exception paths — consider try-with-resources.
        os.close();
        reader.close();
        if (response == null || response.startsWith("ERR") || response.startsWith("7")) {
            // Error protocol: "ERR..." or "7[,]<message>" — surface the message as an IOException.
            if (response == null) {
                response = "null";
            } else if (response.startsWith("7")) {
                response = response.substring(response.startsWith("7,") ? 2 : 1);
            }
            throw new IOException(response);
        } else {
            // Is this the first update this hour? If so, reset all plotters.
            if (response.equals("1") || response.contains("This is your first update this hour")) {
                synchronized (graphs) {
                    final Iterator<Graph> iter = graphs.iterator();
                    while (iter.hasNext()) {
                        final Graph graph = iter.next();
                        for (Plotter plotter : graph.getPlotters()) {
                            plotter.reset();
                        }
                    }
                }
            }
        }
    }
}
public class XesXmlSerializer { /** * Helper method , adds the given collection of attributes to the given Tag .
* @ param tag
* Tag to add attributes to .
* @ param attributes
* The attributes to add . */
protected void addAttributes ( SXTag tag , Collection < XAttribute > attributes ) throws IOException { } } | for ( XAttribute attribute : attributes ) { SXTag attributeTag ; if ( attribute instanceof XAttributeList ) { attributeTag = tag . addChildNode ( "list" ) ; attributeTag . addAttribute ( "key" , attribute . getKey ( ) ) ; } else if ( attribute instanceof XAttributeContainer ) { attributeTag = tag . addChildNode ( "container" ) ; attributeTag . addAttribute ( "key" , attribute . getKey ( ) ) ; } else if ( attribute instanceof XAttributeLiteral ) { attributeTag = tag . addChildNode ( "string" ) ; attributeTag . addAttribute ( "key" , attribute . getKey ( ) ) ; attributeTag . addAttribute ( "value" , attribute . toString ( ) ) ; } else if ( attribute instanceof XAttributeDiscrete ) { attributeTag = tag . addChildNode ( "int" ) ; attributeTag . addAttribute ( "key" , attribute . getKey ( ) ) ; attributeTag . addAttribute ( "value" , attribute . toString ( ) ) ; } else if ( attribute instanceof XAttributeContinuous ) { attributeTag = tag . addChildNode ( "float" ) ; attributeTag . addAttribute ( "key" , attribute . getKey ( ) ) ; attributeTag . addAttribute ( "value" , attribute . toString ( ) ) ; } else if ( attribute instanceof XAttributeTimestamp ) { attributeTag = tag . addChildNode ( "date" ) ; attributeTag . addAttribute ( "key" , attribute . getKey ( ) ) ; Date timestamp = ( ( XAttributeTimestamp ) attribute ) . getValue ( ) ; attributeTag . addAttribute ( "value" , xsDateTimeConversion . format ( timestamp ) ) ; } else if ( attribute instanceof XAttributeBoolean ) { attributeTag = tag . addChildNode ( "boolean" ) ; attributeTag . addAttribute ( "key" , attribute . getKey ( ) ) ; attributeTag . addAttribute ( "value" , attribute . toString ( ) ) ; } else if ( attribute instanceof XAttributeID ) { attributeTag = tag . addChildNode ( "id" ) ; attributeTag . addAttribute ( "key" , attribute . getKey ( ) ) ; attributeTag . addAttribute ( "value" , attribute . 
toString ( ) ) ; } else { throw new IOException ( "Unknown attribute type!" ) ; } if ( attribute instanceof XAttributeCollection ) { /* * Use order as specified by the collection . */
Collection < XAttribute > childAttributes = ( ( XAttributeCollection ) attribute ) . getCollection ( ) ; addAttributes ( attributeTag , childAttributes ) ; } else { addAttributes ( attributeTag , attribute . getAttributes ( ) . values ( ) ) ; } } |
public class FailureMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param failure the model object to marshall; must not be null
     * @param protocolMarshaller the marshaller that receives each bound field
     * @throws SdkClientException if {@code failure} is null or marshalling fails
     */
    public void marshall(Failure failure, ProtocolMarshaller protocolMarshaller) {
        if (failure == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind each model field to its wire location.
            protocolMarshaller.marshall(failure.getArn(), ARN_BINDING);
            protocolMarshaller.marshall(failure.getReason(), REASON_BINDING);
        } catch (Exception e) {
            // Wrap with the cause preserved so callers can inspect the underlying failure.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class AbstractJsonReader {
    /**
     * Parses the root bean.
     *
     * @param input the JSON input
     * @param declaredType the declared type, not null
     * @return the bean, not null
     * @throws Exception if an error occurs
     */
    <T> T parseRoot(JsonInput input, Class<T> declaredType) throws Exception {
        // Keep the input on the instance so nested parse methods can share it.
        this.input = input;
        // The root of the document must be a JSON object; acceptEvent enforces that.
        Object parsed = parseObject(input.acceptEvent(JsonEvent.OBJECT), declaredType, null, null, null, true);
        return declaredType.cast(parsed);
    }
}
public class ConsistentColor { /** * Return the consistent RGB color value for the input .
* This method respects the color vision deficiency mode set by the user .
* @ param input input string ( for example username )
* @ param settings the settings for consistent color creation .
* @ return consistent color of that username as RGB values in range [ 0,1 ] . */
public static float [ ] RGBFrom ( CharSequence input , ConsistentColorSettings settings ) { } } | double angle = createAngle ( input ) ; double correctedAngle = applyColorDeficiencyCorrection ( angle , settings . getDeficiency ( ) ) ; double [ ] CbCr = angleToCbCr ( correctedAngle ) ; float [ ] rgb = CbCrToRGB ( CbCr , Y ) ; return rgb ; |
public class BaseNDArrayProxy {
    /**
     * Custom deserialization for Java serialization.
     *
     * Reads the serialized buffer header first, then allocates a matching data
     * buffer and fills it from the stream.
     *
     * @throws IOException if the stream cannot be read
     * @throws ClassNotFoundException if a serialized class is missing
     */
    protected void read(ObjectInputStream s) throws IOException, ClassNotFoundException {
        // NOTE(review): header components are passed positionally below;
        // assumes (left, middle, right) carry the layout BaseDataBuffer.readHeader
        // wrote — confirm against BaseDataBuffer's serialization format.
        val header = BaseDataBuffer.readHeader(s);
        data = Nd4j.createBuffer(header.getRight(), length, false);
        data.read(s, header.getLeft(), header.getMiddle(), header.getRight());
    }
}
public class PDBFileParser { /** * Handler for CONECT Record Format
* < pre >
* COLUMNS DATA TYPE FIELD DEFINITION
* 1 - 6 Record name " CONECT "
* 7 - 11 Integer serial Atom serial number
* 12 - 16 Integer serial Serial number of bonded atom
* 17 - 21 Integer serial Serial number of bonded atom
* 22 - 26 Integer serial Serial number of bonded atom
* 27 - 31 Integer serial Serial number of bonded atom
* 32 - 36 Integer serial Serial number of hydrogen bonded
* atom
* 37 - 41 Integer serial Serial number of hydrogen bonded
* atom
* 42 - 46 Integer serial Serial number of salt bridged
* atom
* 47 - 51 Integer serial Serial number of hydrogen bonded
* atom
* 52 - 56 Integer serial Serial number of hydrogen bonded
* atom
* 57 - 61 Integer serial Serial number of salt bridged
* atom
* < / pre > */
private void pdb_CONECT_Handler ( String line ) { } } | if ( atomOverflow ) { return ; } if ( params . isHeaderOnly ( ) ) { return ; } // this try . . catch is e . g . to catch 1gte which has wrongly formatted lines . . .
try { int atomserial = Integer . parseInt ( line . substring ( 6 , 11 ) . trim ( ) ) ; Integer bond1 = conect_helper ( line , 11 , 16 ) ; Integer bond2 = conect_helper ( line , 16 , 21 ) ; Integer bond3 = conect_helper ( line , 21 , 26 ) ; Integer bond4 = conect_helper ( line , 26 , 31 ) ; Integer hyd1 = conect_helper ( line , 31 , 36 ) ; Integer hyd2 = conect_helper ( line , 36 , 41 ) ; Integer salt1 = conect_helper ( line , 41 , 46 ) ; Integer hyd3 = conect_helper ( line , 46 , 51 ) ; Integer hyd4 = conect_helper ( line , 51 , 56 ) ; Integer salt2 = conect_helper ( line , 56 , 61 ) ; // System . out . println ( atomserial + " " + bond1 + " " + bond2 + " " + bond3 + " " + bond4 + " " +
// hyd1 + " " + hyd2 + " " + salt1 + " " + hyd3 + " " + hyd4 + " " + salt2 ) ;
HashMap < String , Integer > cons = new HashMap < String , Integer > ( ) ; cons . put ( "atomserial" , new Integer ( atomserial ) ) ; if ( bond1 != null ) cons . put ( "bond1" , bond1 ) ; if ( bond2 != null ) cons . put ( "bond2" , bond2 ) ; if ( bond3 != null ) cons . put ( "bond3" , bond3 ) ; if ( bond4 != null ) cons . put ( "bond4" , bond4 ) ; if ( hyd1 != null ) cons . put ( "hydrogen1" , hyd1 ) ; if ( hyd2 != null ) cons . put ( "hydrogen2" , hyd2 ) ; if ( salt1 != null ) cons . put ( "salt1" , salt1 ) ; if ( hyd3 != null ) cons . put ( "hydrogen3" , hyd3 ) ; if ( hyd4 != null ) cons . put ( "hydrogen4" , hyd4 ) ; if ( salt2 != null ) cons . put ( "salt2" , salt2 ) ; connects . add ( cons ) ; } catch ( NumberFormatException e ) { logger . info ( "could not parse CONECT line correctly (" + e . getMessage ( ) + "), at line : " + line ) ; return ; } |
public class Select { /** * Clear all selected entries . This is only valid when the SELECT supports multiple selections .
* @ throws UnsupportedOperationException If the SELECT does not support multiple selections */
@ Override public void deselectAll ( ) { } } | if ( ! isMultiple ( ) ) { throw new UnsupportedOperationException ( "You may only deselect all options of a multi-select" ) ; } for ( WebElement option : getOptions ( ) ) { setSelected ( option , false ) ; } |
public class ScanlineFiller {
    /**
     * Fills area starting at xx, yy. Pixels fullfilling target are replaced with
     * replacement color.
     *
     * Convenience overload that floods within the full image bounds
     * (clip rectangle 0,0 .. width,height).
     *
     * @param xx starting x coordinate
     * @param yy starting y coordinate
     * @param target predicate selecting pixel values to replace
     * @param replacement replacement color value
     */
    public void floodFill(int xx, int yy, IntPredicate target, int replacement) {
        floodFill(xx, yy, 0, 0, width, height, target, replacement);
    }
}
public class XHTMLText {
    /**
     * Appends a tag that indicates the start of a new paragraph. This is usually rendered
     * with two carriage returns, producing a single blank line in between the two paragraphs.
     *
     * @param style the style of the paragraph (passed through optAttribute, which
     *        presumably omits the attribute when null — confirm against the builder)
     * @return this.
     */
    public XHTMLText appendOpenParagraphTag(String style) {
        // Emit "<p", the optional style attribute, then close the opening bracket.
        text.halfOpenElement(P);
        text.optAttribute(STYLE, style);
        text.rightAngleBracket();
        return this;
    }
}
public class TransformProcess { /** * Infer the categories for the given record reader for a particular column
* Note that each " column index " is a column in the context of :
* List < Writable > record = . . . ;
* record . get ( columnIndex ) ;
* Note that anything passed in as a column will be automatically converted to a
* string for categorical purposes .
* The * expected * input is strings or numbers ( which have sensible toString ( ) representations )
* Note that the returned categories will be sorted alphabetically
* @ param recordReader the record reader to iterate through
* @ param columnIndex te column index to get categories for
* @ return */
public static List < String > inferCategories ( RecordReader recordReader , int columnIndex ) { } } | Set < String > categories = new HashSet < > ( ) ; while ( recordReader . hasNext ( ) ) { List < Writable > next = recordReader . next ( ) ; categories . add ( next . get ( columnIndex ) . toString ( ) ) ; } // Sort categories alphabetically - HashSet and RecordReader orders are not deterministic in general
List < String > ret = new ArrayList < > ( categories ) ; Collections . sort ( ret ) ; return ret ; |
public class DraggableView {
    /**
     * Modify dragged view alpha based on the horizontal position while the view is being
     * horizontally dragged.
     */
    void changeDragViewViewAlpha() {
        if (enableHorizontalAlphaEffect) {
            // Fade the view out linearly with horizontal drag progress.
            float alpha = 1 - getHorizontalDragOffset();
            if (alpha == 0) {
                // NOTE(review): fully-transparent snaps back to fully opaque —
                // presumably to reset the view once the drag completes; confirm intent.
                alpha = 1;
            }
            ViewHelper.setAlpha(dragView, alpha);
        }
    }
}
public class TrustedAdvisorCategorySpecificSummaryMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param trustedAdvisorCategorySpecificSummary the model object to marshall; must not be null
     * @param protocolMarshaller the marshaller that receives each bound field
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(TrustedAdvisorCategorySpecificSummary trustedAdvisorCategorySpecificSummary, ProtocolMarshaller protocolMarshaller) {
        if (trustedAdvisorCategorySpecificSummary == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Only the costOptimizing field is bound for this model.
            protocolMarshaller.marshall(trustedAdvisorCategorySpecificSummary.getCostOptimizing(), COSTOPTIMIZING_BINDING);
        } catch (Exception e) {
            // Wrap with the cause preserved for caller inspection.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CollapsedRequestSubject {
    /**
     * Set an ISE if a response is not yet received otherwise skip it
     *
     * @param e A pre-generated exception. If this is null an ISE will be created and returned
     * @param exceptionMessage The message for the ISE
     * @return the exception that was set (possibly newly created), or the passed-in
     *         exception unchanged when a response was already received
     */
    public Exception setExceptionIfResponseNotReceived(Exception e, String exceptionMessage) {
        Exception exception = e;
        // Only act when no value has been delivered and the request is still live.
        if (!valueSet.get() && !isTerminated()) {
            if (e == null) {
                exception = new IllegalStateException(exceptionMessage);
            }
            setExceptionIfResponseNotReceived(exception);
        }
        // return any exception that was generated
        return exception;
    }
}
public class ConfigRestClientUtil {
    /**
     * Updates a document addressed by the given path.
     * https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-update.html
     *
     * @param path e.g. test/_doc/1 or test/_doc/1/_update
     * @param templateName name of the DSL template used to build the request body
     * @return the raw HTTP response body
     * @throws ElasticSearchException on request failure (404s are translated below)
     */
    public String updateByPath(String path, String templateName) throws ElasticSearchException {
        try {
            // POST the evaluated template body (no template parameters) to the document path.
            return this.client.executeHttp(path, ESTemplateHelper.evalTemplate(esUtil, templateName, (Object) null), ClientUtil.HTTP_POST);
        } catch (ElasticSearchException e) {
            // Translate 404 responses via the standard "update document" result handling.
            return ResultUtil.hand404HttpRuntimeException(e, String.class, ResultUtil.OPERTYPE_updateDocument);
        }
    }
}
public class StubAmpBean {
    /**
     * Looks up a child service by path, creating and registering a pinned
     * service stub for plain bean values returned by the stub class.
     *
     * @param path the child path relative to this bean's service
     * @param parentRef the service ref of the enclosing parent
     * @return an existing or newly registered service ref, the value itself when it
     *         is already a ServiceRef, or null when nothing is found
     */
    @Override
    public Object onLookup(String path, ServiceRefAmp parentRef) {
        StubContainerAmp container = childContainer();
        if (container == null) {
            return null;
        }
        // Fast path: the container already knows this child.
        ServiceRef serviceRef = container.getService(path);
        if (serviceRef != null) {
            return serviceRef;
        }
        // Ask the stub class for a value bound to the path.
        Object value = _stubClass.onLookup(this, path);
        if (value == null) {
            return null;
        } else if (value instanceof ServiceRef) {
            return value;
        } else if (value instanceof ProxyHandleAmp) {
            // Unwrap proxies to their underlying service ref.
            ProxyHandleAmp handle = (ProxyHandleAmp) value;
            return handle.__caucho_getServiceRef();
        } else {
            // Plain bean: wrap it in a stub (unless it already is one), pin it
            // under the child address, and cache the service ref in the container.
            ServicesAmp manager = parentRef.services();
            String address = parentRef.address() + path;
            ServiceConfig config = null;
            StubClassFactoryAmp stubFactory = manager.stubFactory();
            StubAmp stub;
            if (value instanceof StubAmp) {
                stub = (StubAmp) value;
            } else {
                stub = stubFactory.stub(value, address, path, container, config);
            }
            serviceRef = parentRef.pin(stub, address);
            return container.addService(path, serviceRef);
        }
    }
}
public class DateTimeRange {
    /**
     * Sets the startDateTime value for this DateTimeRange.
     *
     * @param startDateTime The start date time of this range. This field is optional and
     *        if it is not set then there is no lower bound on the date time range. If this
     *        field is not set then {@code endDateTime} must be specified.
     */
    public void setStartDateTime(com.google.api.ads.admanager.axis.v201811.DateTime startDateTime) {
        this.startDateTime = startDateTime;
    }
}
public class Tuple4 {
    /**
     * Split this tuple into two tuples of degree 1 and 3.
     *
     * @return a pair of (first element as a Tuple1, remaining three elements as a Tuple3)
     */
    public final Tuple2<Tuple1<T1>, Tuple3<T2, T3, T4>> split1() {
        return new Tuple2<>(limit1(), skip1());
    }
}
public class CFG { /** * Returns a collection of locations , ordered according to the compareTo
* ordering over locations . If you want to list all the locations in a CFG
* for debugging purposes , this is a good order to do so in .
* @ return collection of locations */
public Collection < Location > orderedLocations ( ) { } } | TreeSet < Location > tree = new TreeSet < > ( ) ; for ( Iterator < Location > locs = locationIterator ( ) ; locs . hasNext ( ) ; ) { Location loc = locs . next ( ) ; tree . add ( loc ) ; } return tree ; |
public class ValueEnforcer {
    /**
     * Check that the passed {@link Collection} is neither <code>null</code> nor
     * empty.
     *
     * @param <T>
     *        Type to be checked and returned
     * @param aValue
     *        The collection to check.
     * @param aName
     *        Supplier of the name of the value (e.g. the parameter name),
     *        evaluated lazily only when an error message is built.
     * @return The passed value.
     * @throws IllegalArgumentException
     *         if the passed value is empty
     */
    @CodingStyleguideUnaware
    public static <T extends Collection<?>> T notEmpty(final T aValue, @Nonnull final Supplier<? extends String> aName) {
        // Null check is delegated to notNull; emptiness is only enforced when
        // enforcement is globally enabled.
        notNull(aValue, aName);
        if (isEnabled())
            if (aValue.isEmpty())
                throw new IllegalArgumentException("The value of the collection '" + aName.get() + "' may not be empty!");
        return aValue;
    }
}
public class QueryServiceImpl { /** * Throw an exception if the parameter is missing .
* @ param value Value which is checked for null .
* @ param name The short name of parameter .
* @ param description A one line description of the meaing of the parameter . */
private void requiredParameter ( String value , String name , String description ) throws WebApplicationException { } } | if ( value == null ) { throw new WebApplicationException ( Response . status ( Response . Status . BAD_REQUEST ) . type ( MediaType . TEXT_PLAIN ) . entity ( "missing required parameter '" + name + "' (" + description + ")" ) . build ( ) ) ; } |
public class FirestoreAdminClient { /** * Creates a composite index . This returns a
* [ google . longrunning . Operation ] [ google . longrunning . Operation ] which may be used to track the
* status of the creation . The metadata for the operation will be the type
* [ IndexOperationMetadata ] [ google . firestore . admin . v1 . IndexOperationMetadata ] .
* < p > Sample code :
* < pre > < code >
* try ( FirestoreAdminClient firestoreAdminClient = FirestoreAdminClient . create ( ) ) {
* ParentName parent = ParentName . of ( " [ PROJECT ] " , " [ DATABASE ] " , " [ COLLECTION _ ID ] " ) ;
* Index index = Index . newBuilder ( ) . build ( ) ;
* Operation response = firestoreAdminClient . createIndex ( parent . toString ( ) , index ) ;
* < / code > < / pre >
* @ param parent A parent name of the form
* ` projects / { project _ id } / databases / { database _ id } / collectionGroups / { collection _ id } `
* @ param index The composite index to create .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final Operation createIndex ( String parent , Index index ) { } } | CreateIndexRequest request = CreateIndexRequest . newBuilder ( ) . setParent ( parent ) . setIndex ( index ) . build ( ) ; return createIndex ( request ) ; |
public class ParticipantRepository {
    /**
     * Replies all the addresses from the inside of this repository.
     *
     * NOTE(review): the method name "getAdresses" is misspelled ("getAddresses"),
     * but it is part of the existing API surface and must not be renamed here.
     *
     * @return the addresses in this repository, as a set view synchronized on this
     *         repository's own mutex.
     */
    protected SynchronizedSet<ADDRESST> getAdresses() {
        final Object mutex = mutex();
        synchronized (mutex) {
            // Wrap the live key set so callers share the repository's lock.
            return Collections3.synchronizedSet(this.listeners.keySet(), mutex);
        }
    }
}
public class FessMessages {
    /**
     * Add the created action message for the key 'errors.design_file_is_unsupported_type' with parameters.
     * <pre>
     * message: The kind of file is unsupported.
     * </pre>
     *
     * @param property The property name for the message. (NotNull)
     * @return this. (NotNull)
     */
    public FessMessages addErrorsDesignFileIsUnsupportedType(String property) {
        assertPropertyNotNull(property);
        add(property, new UserMessage(ERRORS_design_file_is_unsupported_type));
        return this;
    }
}
public class MRCompactor {
    /**
     * A {@link Dataset} should be verified if its not already compacted, and it satisfies
     * the blacklist and whitelist.
     *
     * @param dataset the dataset under consideration
     * @param blacklist patterns whose matches are excluded from verification
     * @param whitelist patterns whose matches are included for verification
     * @return true when the dataset is not yet compacted and survives the filters
     */
    private boolean shouldVerifyCompletenessForDataset(Dataset dataset, List<Pattern> blacklist, List<Pattern> whitelist) {
        // Whether compaction renames the source dir changes how "already compacted"
        // is detected, so resolve that setting first.
        boolean renamingRequired = this.state.getPropAsBoolean(COMPACTION_RENAME_SOURCE_DIR_ENABLED, DEFAULT_COMPACTION_RENAME_SOURCE_DIR_ENABLED);
        LOG.info("Should verify completeness with renaming source dir : " + renamingRequired);
        return !datasetAlreadyCompacted(this.fs, dataset, renamingRequired) && DatasetFilterUtils.survived(dataset.getName(), blacklist, whitelist);
    }
}
public class DataSet {
    /**
     * Initiates a Full Outer Join transformation.
     *
     * <p>An Outer Join transformation joins two elements of two
     * {@link DataSet DataSets} on key equality and provides multiple ways to combine
     * joining elements into one DataSet.
     *
     * <p>Elements of <b>both</b> DataSets that do not have a matching
     * element on the opposing side are joined with {@code null} and emitted to the
     * resulting DataSet.
     *
     * @param other The other DataSet with which this DataSet is joined.
     * @param strategy The strategy that should be used execute the join. If {@code null} is given, then the
     *     optimizer will pick the join strategy.
     * @return A JoinOperatorSet to continue the definition of the Join transformation.
     * @see org.apache.flink.api.java.operators.join.JoinOperatorSetsBase
     * @see DataSet
     */
    public <R> JoinOperatorSetsBase<T, R> fullOuterJoin(DataSet<R> other, JoinHint strategy) {
        // NOTE(review): the Javadoc above permits a null strategy, but switch(strategy)
        // throws NullPointerException on null — confirm callers never pass null, or
        // map null to OPTIMIZER_CHOOSES before switching.
        switch (strategy) {
            // Only these hints are meaningful for a full outer join.
            case OPTIMIZER_CHOOSES:
            case REPARTITION_SORT_MERGE:
            case REPARTITION_HASH_FIRST:
            case REPARTITION_HASH_SECOND:
                return new JoinOperatorSetsBase<>(this, other, strategy, JoinType.FULL_OUTER);
            default:
                throw new InvalidProgramException("Invalid JoinHint for FullOuterJoin: " + strategy);
        }
    }
}
public class MemoryTracker {
    /**
     * This method returns precise amount of free memory on specified device.
     *
     * @param deviceId the device to query
     * @return free device memory as reported by the native backend (presumably bytes —
     *         confirm against the native ops contract)
     */
    public long getPreciseFreeMemory(int deviceId) {
        // Query the native ops layer directly for an up-to-date value rather than
        // relying on any cached per-device counter.
        val extFree = NativeOpsHolder.getInstance().getDeviceNativeOps().getDeviceFreeMemory(deviceId);
        return extFree;
    }
}
public class RequestProcessorChain {
    /**
     * <p>Accepts the {@link InvocationContext} given to {@link #run(Object...)} the {@link RequestProcessorChain}
     * and translates the request metadata to a concrete instance of {@link HttpRequestBase}. The
     * {@link HttpRequestBase}, together with the {@link InvocationContext} is then given to the root link
     * which runs the {@link UriProcessor} and returns the resulting {@link HttpRequestBase}.</p>
     * <p>See {@link AbstractRequestProcessor}.</p>
     * {@inheritDoc}
     */
    @Override
    protected HttpRequestBase onInitiate(ProcessorChainLink<HttpRequestBase, RequestProcessorException> root, Object... args) {
        // The first vararg must be present and assignable to InvocationContext.
        InvocationContext context = assertAssignable(assertNotEmpty(args)[0], InvocationContext.class);
        HttpRequestBase request = RequestUtils.translateRequestMethod(context);
        return root.getProcessor().run(context, request); // allow any exceptions to elevate to a chain-wide failure
    }
}
public class DuplicationMonitor { /** * Create the store manager to connect to this DuraCloud account instance */
private ContentStoreManager getStoreManager ( String host ) throws DBNotFoundException { } } | ContentStoreManager storeManager = new ContentStoreManagerImpl ( host , PORT , CONTEXT ) ; Credential credential = getRootCredential ( ) ; storeManager . login ( credential ) ; return storeManager ; |
public class SftpProtocolHandler { /** * { @ inheritDoc } */
@ Override public File downloadResource ( final String url , final String path ) throws ResourceDownloadError { } } | JSch jsch = new JSch ( ) ; String [ ] sftpPath = url . substring ( 7 ) . split ( "\\@" ) ; final String [ ] userCreds = sftpPath [ 0 ] . split ( "\\:" ) ; try { String host ; int port = ProtocolHandlerConstants . DEFAULT_SSH_PORT ; String filePath = sftpPath [ 1 ] . substring ( sftpPath [ 1 ] . indexOf ( '/' ) ) ; String [ ] location = sftpPath [ 1 ] . split ( "\\/" ) ; if ( location [ 0 ] . contains ( ":" ) ) { String [ ] hostPort = location [ 0 ] . split ( "\\:" ) ; host = hostPort [ 0 ] ; port = Integer . parseInt ( hostPort [ 1 ] ) ; } else { host = location [ 0 ] ; } if ( userCreds == null || userCreds . length == 0 ) { throw new UnsupportedOperationException ( "Non-specified user in sftp URL not supported yet." ) ; } Session session = jsch . getSession ( userCreds [ 0 ] , host , port ) ; session . setConfig ( "StrictHostKeyChecking" , "no" ) ; // don ' t validate against a known _ hosts file
session . setConfig ( "PreferredAuthentications" , "password,gssapi-with-mic,publickey,keyboard-interactive" ) ; if ( userCreds . length == 1 ) { session . setUserInfo ( ui ) ; } else { session . setPassword ( userCreds [ 1 ] ) ; } session . connect ( ) ; Channel channel = session . openChannel ( "sftp" ) ; channel . connect ( ) ; ChannelSftp c = ( ChannelSftp ) channel ; File downloadFile = new File ( path ) ; FileOutputStream tempFileOutputStream = new FileOutputStream ( downloadFile ) ; IOUtils . copy ( c . get ( filePath ) , tempFileOutputStream ) ; channel . disconnect ( ) ; session . disconnect ( ) ; return downloadFile ; } catch ( Exception e ) { final String msg = "Error downloading namespace" ; throw new ResourceDownloadError ( url , msg , e ) ; } |
public class ClassLoadingMetricSet {
    /**
     * Registers all the metrics in this metric pack.
     *
     * @param metricsRegistry the MetricsRegistry upon which the metrics are registered.
     */
    public static void register(MetricsRegistry metricsRegistry) {
        checkNotNull(metricsRegistry, "metricsRegistry");
        ClassLoadingMXBean mxBean = ManagementFactory.getClassLoadingMXBean();
        // Expose the three JVM class-loading counters as mandatory probes.
        metricsRegistry.register(mxBean, "classloading.loadedClassesCount", MANDATORY, ClassLoadingMXBean::getLoadedClassCount);
        metricsRegistry.register(mxBean, "classloading.totalLoadedClassesCount", MANDATORY, ClassLoadingMXBean::getTotalLoadedClassCount);
        metricsRegistry.register(mxBean, "classloading.unloadedClassCount", MANDATORY, ClassLoadingMXBean::getUnloadedClassCount);
    }
}
public class TcpClientExample {
    /**
     * Thread which sends characters and prints received responses to the console.
     *
     * Reads console lines until an empty line is entered; each line is sent over
     * the socket with CRLF appended, and the socket is closed when input ends.
     */
    private Thread getScannerThread() {
        return new Thread(() -> {
            Scanner scanIn = new Scanner(System.in);
            while (true) {
                String line = scanIn.nextLine();
                if (line.isEmpty()) {
                    // Empty line terminates the interactive session.
                    break;
                }
                ByteBuf buf = ByteBuf.wrapForReading(encodeAscii(line + "\r\n"));
                // Socket operations must run on the eventloop thread, not this one.
                eventloop.execute(() -> socket.write(buf));
            }
            eventloop.execute(socket::close);
        });
    }
}
public class LineItemServiceLocator {
    /**
     * For the given interface, get the stub implementation.
     * If this service has no port for the given interface,
     * then ServiceException is thrown.
     */
    public java.rmi.Remote getPort(Class serviceEndpointInterface) throws javax.xml.rpc.ServiceException {
        try {
            if (com.google.api.ads.admanager.axis.v201902.LineItemServiceInterface.class.isAssignableFrom(serviceEndpointInterface)) {
                com.google.api.ads.admanager.axis.v201902.LineItemServiceSoapBindingStub _stub = new com.google.api.ads.admanager.axis.v201902.LineItemServiceSoapBindingStub(new java.net.URL(LineItemServiceInterfacePort_address), this);
                _stub.setPortName(getLineItemServiceInterfacePortWSDDServiceName());
                return _stub;
            }
        } catch (java.lang.Throwable t) {
            // NOTE(review): catching Throwable is very broad (standard Axis
            // generated-code style); it wraps even Errors into ServiceException.
            throw new javax.xml.rpc.ServiceException(t);
        }
        // Reaching here means no stub matched the requested interface.
        throw new javax.xml.rpc.ServiceException("There is no stub implementation for the interface: " + (serviceEndpointInterface == null ? "null" : serviceEndpointInterface.getName()));
    }
}
public class AtomContainer { /** * { @ inheritDoc } */
@ Override public IBond removeBond ( int position ) { } } | IBond bond = bonds [ position ] ; for ( int i = position ; i < bondCount - 1 ; i ++ ) { bonds [ i ] = bonds [ i + 1 ] ; } bonds [ bondCount - 1 ] = null ; bondCount -- ; return bond ; |
public class DailyTimeIntervalTrigger {
    /**
     * Returns the final time at which the <code>DailyTimeIntervalTrigger</code>
     * will fire, if there is no end time set, null will be returned.
     * Note that the return time may be in the past.
     */
    @Override
    public Date getFinalFireTime() {
        // A completed trigger or one without an end time has no final fire time.
        if (m_bComplete || getEndTime() == null) {
            return null;
        }
        // We have an endTime, we still need to check to see if there is a
        // endTimeOfDay if that's applicable.
        Date eTime = getEndTime();
        if (m_aEndTimeOfDay != null) {
            final Date endTimeOfDayDate = m_aEndTimeOfDay.getTimeOfDayForDate(eTime);
            if (eTime.getTime() < endTimeOfDayDate.getTime()) {
                // NOTE(review): this picks the LATER of endTime and that day's
                // end-of-day instant — confirm that extending past endTime is intended.
                eTime = endTimeOfDayDate;
            }
        }
        return eTime;
    }
}
public class StoreConfig { /** * Saves configuration to a properties file in a format suitable for using { { @ link # load ( File ) } .
* @ param propertiesFile - a configuration properties file
* @ param comments - a description of the configuration
* @ throws IOException */
public void save ( File propertiesFile , String comments ) throws IOException { } } | FileWriter writer = new FileWriter ( propertiesFile ) ; _properties . store ( writer , comments ) ; writer . close ( ) ; |
public class SeqServerGroup {
    /**
     * The substring method will create a copy of the substring in JDK 8 and probably newer
     * versions. To reduce the number of allocations we use a char buffer to return a view
     * with just that subset.
     *
     * @param str source sequence
     * @param s start index, inclusive
     * @param e end index, exclusive
     * @return a view of str[s, e), or null when the range is empty
     */
    private static CharSequence substr(CharSequence str, int s, int e) {
        if (s >= e) {
            // Empty or inverted range: callers treat null as "no value".
            return null;
        }
        return CharBuffer.wrap(str, s, e);
    }
}
public class CreateClusterRequest {
    /**
     * Sets the type of storage used by this cluster to serve its parent instance's tables.
     * Defaults to {@code SSD}.
     *
     * @param storageType the storage type; must be non-null and must not be UNRECOGNIZED
     * @return this request, for chaining
     */
    @SuppressWarnings("WeakerAccess")
    public CreateClusterRequest setStorageType(@Nonnull StorageType storageType) {
        Preconditions.checkNotNull(storageType);
        Preconditions.checkArgument(storageType != StorageType.UNRECOGNIZED, "StorageType can't be UNRECOGNIZED");
        // Write through to the underlying cluster proto builder.
        proto.getClusterBuilder().setDefaultStorageType(storageType.toProto());
        return this;
    }
}
public class Configuration { /** * The matchers used to authorize the incoming requests in function of the referer . For example :
* < pre > < code >
* allowedReferers :
* - ! hostnameMatch
* host : example . com
* allowSubDomains : true
* < / code > < / pre >
* By default , the referer is not checked
* @ param matchers the list of matcher to use to check if a referer is permitted or null for no
* check
* @ see org . mapfish . print . processor . http . matcher . URIMatcher */
public final void setAllowedReferers ( @ Nullable final List < ? extends URIMatcher > matchers ) { } } | this . allowedReferers = matchers != null ? new UriMatchers ( matchers ) : null ; |
public class Packer { /** * Resize input buffer to newsize
* @ param buf
* @ param newsize
* @ return */
static final byte [ ] resizeBuffer ( final byte [ ] buf , final int newsize ) { } } | if ( buf . length == newsize ) return buf ; final byte [ ] newbuf = new byte [ newsize ] ; System . arraycopy ( buf , 0 , newbuf , 0 , Math . min ( buf . length , newbuf . length ) ) ; return newbuf ; |
public class SingleNodeCrossover {
    /**
     * Swaps one randomly chosen non-root subtree of {@code that} with one of
     * {@code other}, in place. The static method makes it easier to test.
     *
     * @param that first tree (modified in place)
     * @param other second tree (modified in place)
     * @return the number of altered trees: 2 when a swap happened, 0 when either
     *         tree has no non-root node to swap
     */
    static <A> int swap(final TreeNode<A> that, final TreeNode<A> other) {
        assert that != null;
        assert other != null;
        final Random random = RandomRegistry.getRandom();
        // Enumerate nodes breadth-first; index 0 is the root, which is never swapped.
        final ISeq<TreeNode<A>> seq1 = that.breadthFirstStream().collect(ISeq.toISeq());
        final ISeq<TreeNode<A>> seq2 = other.breadthFirstStream().collect(ISeq.toISeq());
        final int changed;
        if (seq1.length() > 1 && seq2.length() > 1) {
            // "+ 1" skips the root node in both trees.
            final TreeNode<A> n1 = seq1.get(random.nextInt(seq1.length() - 1) + 1);
            final TreeNode<A> p1 = n1.getParent().orElseThrow(AssertionError::new);
            final TreeNode<A> n2 = seq2.get(random.nextInt(seq2.length() - 1) + 1);
            final TreeNode<A> p2 = n2.getParent().orElseThrow(AssertionError::new);
            // Detach each chosen subtree and insert it at the other's position.
            final int i1 = p1.getIndex(n1);
            final int i2 = p2.getIndex(n2);
            p1.insert(i1, n2.detach());
            p2.insert(i2, n1.detach());
            changed = 2;
        } else {
            changed = 0;
        }
        return changed;
    }
}
public class HtmlMessages {
    /**
     * <p>Return the value of the <code>warnClass</code> property.</p>
     * <p>Contents: CSS style class to apply to any message
     * with a severity class of "WARN".</p>
     */
    public java.lang.String getWarnClass() {
        // Resolved through the JSF state helper so value expressions are honored.
        return (java.lang.String) getStateHelper().eval(PropertyKeys.warnClass);
    }
}
public class Logos { /** * { @ inheritDoc } */
@ Override public List < Integer > getIds ( ) { } } | List < Integer > result = new ArrayList < > ( ) ; for ( Logo logo : this ) { result . add ( logo . getNid ( ) ) ; } return result ; |
public class WhileyFileParser {
    /**
     * Parse a do-while statement, which has the form:
     *
     * <pre>
     * DoWhileStmt ::= "do" ':' NewLine Block "while" Expr ("where" Expr)*
     * </pre>
     *
     * @see wyc.lang.Stmt.DoWhile
     * @param scope
     *        The enclosing scope for this statement, which determines the
     *        set of visible (i.e. declared) variables and also the current
     *        indentation level.
     * @return the parsed do-while statement, annotated with its source location
     * @author David J. Pearce
     */
    private Stmt parseDoWhileStatement(EnclosingScope scope) {
        int start = index;
        match(Do);
        match(Colon);
        // The recorded source span ends at the colon, before the newline.
        int end = index;
        matchEndLine();
        // match the block
        Stmt.Block blk = parseBlock(scope, true);
        // match while and condition
        match(While);
        Expr condition = parseLogicalExpression(scope, false);
        // Parse the loop invariants
        Tuple<Expr> invariant = parseInvariant(scope, Where);
        matchEndLine();
        return annotateSourceLocation(new Stmt.DoWhile(condition, invariant, new Tuple<>(), blk), start, end - 1);
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.