signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class Vulnerability { /** * Returns the list of references . This is primarily used within the
* generated reports .
* @ param sorted whether the returned list should be sorted
* @ return the list of references */
public List < Reference > getReferences ( boolean sorted ) { } }
|
final List < Reference > sortedRefs = new ArrayList < > ( this . references ) ; if ( sorted ) { Collections . sort ( sortedRefs ) ; } return sortedRefs ;
|
public class SignMgrImpl { /** * 验证密码是否正确
* @ param userPassword
* @ param passwordToBeValidate
* @ return */
public boolean validate ( String userPassword , String passwordToBeValidate ) { } }
|
String data = SignUtils . createPassword ( passwordToBeValidate ) ; if ( data . equals ( userPassword ) ) { return true ; } else { return false ; }
|
public class Cache { /** * 同时将多个 field - value ( 域 - 值 ) 对设置到哈希表 key 中 。
* 此命令会覆盖哈希表中已存在的域 。
* 如果 key 不存在 , 一个空哈希表被创建并执行 HMSET 操作 。 */
public String hmset ( Object key , Map < Object , Object > hash ) { } }
|
Jedis jedis = getJedis ( ) ; try { Map < byte [ ] , byte [ ] > para = new HashMap < byte [ ] , byte [ ] > ( ) ; for ( Entry < Object , Object > e : hash . entrySet ( ) ) para . put ( fieldToBytes ( e . getKey ( ) ) , valueToBytes ( e . getValue ( ) ) ) ; return jedis . hmset ( keyToBytes ( key ) , para ) ; } finally { close ( jedis ) ; }
|
public class JsonObjectDecoder { /** * Override this method if you want to filter the json objects / arrays that get passed through the pipeline . */
@ SuppressWarnings ( "UnusedParameters" ) protected ByteBuf extractObject ( ChannelHandlerContext ctx , ByteBuf buffer , int index , int length ) { } }
|
return buffer . retainedSlice ( index , length ) ;
|
public class ModelGenerator { /** * Creates a model object ( JS code ) based on the provided ModelBean and writes it into
* the response .
* @ param request the http servlet request
* @ param response the http servlet response
* @ param model { @ link ModelBean } describing the model to be generated
* @ param format specifies which code ( ExtJS or Touch ) the generator should create .
* @ param debug if true the generator creates the output in pretty format , false the
* output is compressed
* @ throws IOException */
public static void writeModel ( HttpServletRequest request , HttpServletResponse response , ModelBean model , OutputFormat format , boolean debug ) throws IOException { } }
|
OutputConfig outputConfig = new OutputConfig ( ) ; outputConfig . setDebug ( debug ) ; outputConfig . setOutputFormat ( format ) ; writeModel ( request , response , model , outputConfig ) ;
|
public class RedGBuilder { /** * Sets the dummy factory
* @ param dummyFactory The dummy factory
* @ return The builder itself
* @ see AbstractRedG # setDummyFactory ( DummyFactory ) */
public RedGBuilder < T > withDummyFactory ( final DummyFactory dummyFactory ) { } }
|
if ( instance == null ) { throw new IllegalStateException ( "Using the builder after build() was called is not allowed!" ) ; } instance . setDummyFactory ( dummyFactory ) ; return this ;
|
public class SourceService { /** * Returns a range of lines as raw db data . User permission is not verified .
* @ param from starts from 1
* @ param toInclusive starts from 1 , must be greater than or equal param { @ code from } */
public Optional < Iterable < DbFileSources . Line > > getLines ( DbSession dbSession , String fileUuid , int from , int toInclusive ) { } }
|
return getLines ( dbSession , fileUuid , from , toInclusive , Function . identity ( ) ) ;
|
public class IO { /** * Close given InputStream , ignoring any resulting exception .
* @ param inputStream
* the InputStream to close ; may be null ( in which case nothing
* happens ) */
public static void close ( @ CheckForNull InputStream inputStream ) { } }
|
if ( inputStream == null ) { return ; } try { inputStream . close ( ) ; } catch ( IOException e ) { // Ignore
}
|
public class LaxHttpParser { /** * Read up to < tt > " \ n " < / tt > from an ( unchunked ) input stream .
* If the stream ends before the line terminator is found ,
* the last part of the string will still be returned .
* If no input data available , < code > null < / code > is returned .
* @ param inputStream the stream to read from
* @ param charset charset of HTTP protocol elements
* @ throws IOException if an I / O problem occurs
* @ return a line from the stream
* @ since 3.0 */
public static String readLine ( InputStream inputStream , String charset ) throws IOException { } }
|
LOG . trace ( "enter LaxHttpParser.readLine(InputStream, String)" ) ; byte [ ] rawdata = readRawLine ( inputStream ) ; if ( rawdata == null ) { return null ; } // strip CR and LF from the end
int len = rawdata . length ; int offset = 0 ; if ( len > 0 ) { if ( rawdata [ len - 1 ] == '\n' ) { offset ++ ; if ( len > 1 ) { if ( rawdata [ len - 2 ] == '\r' ) { offset ++ ; } } } } return EncodingUtil . getString ( rawdata , 0 , len - offset , charset ) ;
|
public class MusicOnHoldApi { /** * Upload WAV file to MOH .
* Upload the specified WAV file to the MOH .
* @ param musicFile The musicFile file for uploading to MOH . ( required )
* @ return SendMOHFilesResponse
* @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */
public SendMOHFilesResponse sendMOHFiles ( File musicFile ) throws ApiException { } }
|
ApiResponse < SendMOHFilesResponse > resp = sendMOHFilesWithHttpInfo ( musicFile ) ; return resp . getData ( ) ;
|
public class AntTaskRunner { /** * < p > execute . < / p >
* @ throws org . apache . tools . ant . BuildException if any . */
public void execute ( ) throws BuildException { } }
|
try { verboseHeaderToLog ( ) ; List < String > args = buildCommandLine ( ) ; verboseCommandLineToLog ( args ) ; doRun ( args ) ; } catch ( BuildException ex ) { throw ex ; } catch ( Exception ex ) { throw new BuildException ( "AntTaskRunner" , ex ) ; }
|
public class SipConnectorService { /** * { @ inheritDoc } */
public synchronized Connector getValue ( ) throws IllegalStateException { } }
|
final Connector connector = this . connector ; if ( connector == null ) { throw MESSAGES . nullValue ( ) ; } return connector ;
|
public class IniFile {
    /**
     * Saves the current in-memory sections back to the ini file.
     * Each section is written as "[name]" followed by its "key=value" lines.
     *
     * @throws IOException if the file cannot be created or opened for writing
     */
    public void save() throws IOException {
        // Create the backing file on first save.
        if (!file.exists()) file.createFile(true);
        OutputStream out = IOUtil.toBufferedOutputStream(file.getOutputStream());
        Iterator it = sections.keySet().iterator();
        PrintWriter output = new PrintWriter(out);
        try {
            while (it.hasNext()) {
                String strSection = (String) it.next();
                // Section header line, e.g. "[database]".
                output.println("[" + strSection + "]");
                Map section = getSectionEL(strSection);
                Iterator iit = section.keySet().iterator();
                while (iit.hasNext()) {
                    String key = (String) iit.next();
                    output.println(key + "=" + section.get(key));
                }
            }
        } finally {
            // *EL helpers swallow exceptions: flush/close writer first, then the raw stream.
            IOUtil.flushEL(output);
            IOUtil.closeEL(output);
            IOUtil.flushEL(out);
            IOUtil.closeEL(out);
        }
    }
}
|
public class IsValid { /** * regex check
* @ param pc
* @ param type
* @ param value
* @ param objPattern
* @ return
* @ throws PageException */
public static boolean call ( PageContext pc , String type , Object value , Object objPattern ) throws PageException { } }
|
type = type . trim ( ) ; if ( ! "regex" . equalsIgnoreCase ( type ) && ! "regular_expression" . equalsIgnoreCase ( type ) ) throw new FunctionException ( pc , "isValid" , 1 , "type" , "wrong attribute count for type [" + type + "]" ) ; return regex ( Caster . toString ( value , null ) , Caster . toString ( objPattern ) ) ;
|
public class RegexTool {
    /**
     * Compiles a regular expression into a java {@code Pattern} object.
     *
     * @param regex the textual representation of the regular expression
     * @return the {@code Pattern} object corresponding to the regular expression,
     *         or {@code null} if the expression is invalid
     * @since 2.3M1
     */
    public Pattern compile(String regex) {
        try {
            return Pattern.compile(regex);
        } catch (PatternSyntaxException invalid) {
            // Invalid expressions are signalled with null rather than an exception.
            return null;
        }
    }
}
|
public class PDBusinessCard { /** * This method clones all values from < code > this < / code > to the passed object .
* All data in the parameter object is overwritten !
* @ param ret
* The target object to clone to . May not be < code > null < / code > . */
public void cloneTo ( @ Nonnull final PDBusinessCard ret ) { } }
|
ret . m_aParticipantIdentifier = m_aParticipantIdentifier ; ret . m_aEntities = new CommonsArrayList < > ( m_aEntities , PDBusinessEntity :: getClone ) ;
|
public class SchemaFacetMarshaller { /** * Marshall the given parameter object . */
public void marshall ( SchemaFacet schemaFacet , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( schemaFacet == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( schemaFacet . getSchemaArn ( ) , SCHEMAARN_BINDING ) ; protocolMarshaller . marshall ( schemaFacet . getFacetName ( ) , FACETNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class InternalXbaseParser {
    /**
     * Entry rule for XBooleanLiteral. ANTLR-generated from
     * InternalXbase.g:1283:1: entryRuleXBooleanLiteral : ruleXBooleanLiteral EOF ;
     * Do not hand-edit: structure mirrors the grammar, including the empty finally.
     */
    public final void entryRuleXBooleanLiteral() throws RecognitionException {
        try {
            // InternalXbase.g:1284:1: ( ruleXBooleanLiteral EOF )
            // InternalXbase.g:1285:1: ruleXBooleanLiteral EOF
            {
                // before/after hooks only fire when not backtracking.
                if (state.backtracking == 0) {
                    before(grammarAccess.getXBooleanLiteralRule());
                }
                pushFollow(FOLLOW_1);
                ruleXBooleanLiteral();
                state._fsp--;
                if (state.failed) return;
                if (state.backtracking == 0) {
                    after(grammarAccess.getXBooleanLiteralRule());
                }
                match(input, EOF, FOLLOW_2);
                if (state.failed) return;
            }
        } catch (RecognitionException re) {
            // Standard ANTLR error handling: report, then resynchronize.
            reportError(re);
            recover(input, re);
        } finally {
        }
        return;
    }
}
|
public class ParserUtils {
    /**
     * Skips exactly the given number of bytes in the stream.
     *
     * <p>Fix: {@code InputStream.skip} may legitimately return 0 (and does so at
     * end-of-stream for many implementations), which made the original
     * {@code while (size > 0) size -= input.skip(size);} loop spin forever on a
     * truncated stream. When skip makes no progress we fall back to a single-byte
     * read to distinguish "nothing skipped right now" from end-of-stream.
     *
     * @param size number of bytes to skip
     * @param input input stream to process
     * @throws IOException in case of an IO error
     * @throws EOFException if the stream ends before {@code size} bytes were skipped
     */
    public static void skip(long size, InputStream input) throws IOException {
        long remaining = size;
        while (remaining > 0) {
            final long skipped = input.skip(remaining);
            if (skipped > 0) {
                remaining -= skipped;
                continue;
            }
            // skip() reported no progress: probe with read() to detect EOF.
            if (input.read() == -1) {
                throw new EOFException("Unexpected end of stream; " + remaining + " bytes left to skip");
            }
            remaining--;
        }
    }
}
|
public class PathFileObject { /** * Create a PathFileObject within a directory , such that the binary name
* can be inferred from the relationship to the parent directory . */
static PathFileObject createDirectoryPathFileObject ( JavacPathFileManager fileManager , final Path path , final Path dir ) { } }
|
return new PathFileObject ( fileManager , path ) { @ Override String inferBinaryName ( Iterable < ? extends Path > paths ) { return toBinaryName ( dir . relativize ( path ) ) ; } } ;
|
public class N1qlParams { /** * Sets the { @ link Document } s resulting of a mutation this query should be consistent with .
* @ param documents the documents returned from a mutation .
* @ return this { @ link N1qlParams } for chaining . */
@ InterfaceStability . Committed public N1qlParams consistentWith ( Document ... documents ) { } }
|
return consistentWith ( MutationState . from ( documents ) ) ;
|
public class ClassLoaderLeakPreventorFactory { /** * Add new { @ link I } entry to { @ code map } , taking { @ link MustBeAfter } into account */
private < I > void addConsideringOrder ( Map < String , I > map , I newEntry ) { } }
|
for ( Map . Entry < String , I > entry : map . entrySet ( ) ) { if ( entry . getValue ( ) instanceof MustBeAfter < ? > ) { final Class < ? extends ClassLoaderPreMortemCleanUp > [ ] existingMustBeAfter = ( ( MustBeAfter < ClassLoaderPreMortemCleanUp > ) entry . getValue ( ) ) . mustBeBeforeMe ( ) ; for ( Class < ? extends ClassLoaderPreMortemCleanUp > clazz : existingMustBeAfter ) { if ( clazz . isAssignableFrom ( newEntry . getClass ( ) ) ) { // Entry needs to be after new entry
// TODO Resolve order automatically # 51
throw new IllegalStateException ( clazz . getName ( ) + " must be added after " + newEntry . getClass ( ) ) ; } } } } map . put ( newEntry . getClass ( ) . getName ( ) , newEntry ) ;
|
public class ModelHelper { /** * Helper to convert retention policy from RPC call to internal representation .
* @ param policy The retention policy from RPC interface .
* @ return New instance of RetentionPolicy . */
public static final RetentionPolicy encode ( final Controller . RetentionPolicy policy ) { } }
|
// Using default enum type of UNKNOWN ( 0 ) to detect if retention policy has been set or not .
// This is required since proto3 does not have any other way to detect if a field has been set or not .
if ( policy != null && policy . getRetentionType ( ) != Controller . RetentionPolicy . RetentionPolicyType . UNKNOWN ) { return RetentionPolicy . builder ( ) . retentionType ( RetentionPolicy . RetentionType . valueOf ( policy . getRetentionType ( ) . name ( ) ) ) . retentionParam ( policy . getRetentionParam ( ) ) . build ( ) ; } else { return null ; }
|
public class AbstractCommandLineRunner {
    /**
     * Processes the results of the compile job, and returns an error code.
     * Debug dumps (pass graph / AST / tree) short-circuit before any normal output;
     * otherwise the requested output artifacts are written in a fixed order.
     * Returns 0 on success, otherwise the error count capped at 0x7f (max exit code).
     */
    @GwtIncompatible("Unnecessary")
    int processResults(Result result, List<JSModule> modules, B options) throws IOException {
        // Debug mode 1: dump the pass graph in DOT format and exit.
        if (config.printPassGraph) {
            if (compiler.getRoot() == null) {
                return 1;
            } else {
                Appendable jsOutput = createDefaultOutput();
                jsOutput.append(DotFormatter.toDot(compiler.getPassConfig().getPassGraph()));
                jsOutput.append('\n');
                closeAppendable(jsOutput);
                return 0;
            }
        }
        // Debug mode 2: dump the AST with its control-flow graph in DOT format.
        if (config.printAst) {
            if (compiler.getRoot() == null) {
                return 1;
            } else {
                Appendable jsOutput = createDefaultOutput();
                ControlFlowGraph<Node> cfg = compiler.computeCFG();
                DotFormatter.appendDot(compiler.getRoot().getLastChild(), cfg, jsOutput);
                jsOutput.append('\n');
                closeAppendable(jsOutput);
                return 0;
            }
        }
        // Debug mode 3: dump the string tree of the AST.
        if (config.printTree) {
            if (compiler.getRoot() == null) {
                compiler.report(JSError.make(NO_TREE_GENERATED_ERROR));
                return 1;
            } else {
                Appendable jsOutput = createDefaultOutput();
                compiler.getRoot().appendStringTree(jsOutput);
                jsOutput.append("\n");
                closeAppendable(jsOutput);
                return 0;
            }
        }
        if (config.skipNormalOutputs) {
            // Output the manifest and bundle files if requested.
            outputManifest();
            outputBundle();
            outputModuleGraphJson();
            return 0;
        } else if (options.outputJs != OutputJs.NONE && result.success) {
            outputModuleGraphJson();
            if (modules == null) {
                outputSingleBinary(options);
                // Output the source map if requested.
                // If output files are being written to stdout as a JSON string,
                // outputSingleBinary will have added the sourcemap to the output file
                if (!isOutputInJson()) {
                    outputSourceMap(options, config.jsOutputFile);
                }
            } else {
                DiagnosticType error = outputModuleBinaryAndSourceMaps(compiler.getModules(), options);
                if (error != null) {
                    compiler.report(JSError.make(error));
                    return 1;
                }
            }
            // Output the externs if required.
            if (options.externExportsPath != null) {
                try (Writer eeOut = openExternExportsStream(options, config.jsOutputFile)) {
                    eeOut.append(result.externExport);
                }
            }
            // Output the variable and property name maps if requested.
            outputNameMaps();
            // Output the ReplaceStrings map if requested
            outputStringMap();
            // Output the manifest and bundle files if requested.
            outputManifest();
            outputBundle();
            if (isOutputInJson()) {
                outputJsonStream();
            }
        }
        // return 0 if no errors, the error count otherwise
        return Math.min(result.errors.size(), 0x7f);
    }
}
|
public class CpCommand { /** * Copies a list of files or directories specified by srcPaths to the destination specified by
* dstPath . This method is used when the original source path contains wildcards .
* @ param srcPaths a list of files or directories in the Alluxio filesystem
* @ param dstPath the destination in the Alluxio filesystem
* @ param recursive indicates whether directories should be copied recursively */
private void copyWildcard ( List < AlluxioURI > srcPaths , AlluxioURI dstPath , boolean recursive ) throws AlluxioException , IOException { } }
|
URIStatus dstStatus = null ; try { dstStatus = mFileSystem . getStatus ( dstPath ) ; } catch ( FileDoesNotExistException e ) { // if the destination does not exist , it will be created
} if ( dstStatus != null && ! dstStatus . isFolder ( ) ) { throw new InvalidPathException ( ExceptionMessage . DESTINATION_CANNOT_BE_FILE . getMessage ( ) ) ; } if ( dstStatus == null ) { mFileSystem . createDirectory ( dstPath ) ; System . out . println ( "Created directory: " + dstPath ) ; } List < String > errorMessages = new ArrayList < > ( ) ; for ( AlluxioURI srcPath : srcPaths ) { try { copy ( srcPath , new AlluxioURI ( dstPath . getScheme ( ) , dstPath . getAuthority ( ) , PathUtils . concatPath ( dstPath . getPath ( ) , srcPath . getName ( ) ) ) , recursive ) ; } catch ( AlluxioException | IOException e ) { errorMessages . add ( e . getMessage ( ) ) ; } } if ( errorMessages . size ( ) != 0 ) { throw new IOException ( Joiner . on ( '\n' ) . join ( errorMessages ) ) ; }
|
public class Record { /** * Creates a new record with the ID and the types specified ( property { @ code rdf : type } ) , and
* no additional properties .
* @ param id
* the ID of the new record , possibly null in order not to assign it
* @ param types
* the types of the record , assigned to property { @ code rdf : type }
* @ return the created record */
public static Record create ( final URI id , final URI ... types ) { } }
|
final Record record = new Record ( id ) ; if ( types . length > 0 ) { record . set ( RDF . TYPE , types ) ; } return record ;
|
public class GlobalOperationClient { /** * Retrieves an aggregated list of all operations .
* < p > Sample code :
* < pre > < code >
* try ( GlobalOperationClient globalOperationClient = GlobalOperationClient . create ( ) ) {
* ProjectName project = ProjectName . of ( " [ PROJECT ] " ) ;
* for ( OperationsScopedList element : globalOperationClient . aggregatedListGlobalOperations ( project ) . iterateAll ( ) ) {
* / / doThingsWith ( element ) ;
* < / code > < / pre >
* @ param project Project ID for this request .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final AggregatedListGlobalOperationsPagedResponse aggregatedListGlobalOperations ( ProjectName project ) { } }
|
AggregatedListGlobalOperationsHttpRequest request = AggregatedListGlobalOperationsHttpRequest . newBuilder ( ) . setProject ( project == null ? null : project . toString ( ) ) . build ( ) ; return aggregatedListGlobalOperations ( request ) ;
|
public class CollisionFormulaConfig { /** * Create the formula data from node .
* @ param config The collision formulas descriptor ( must not be < code > null < / code > ) .
* @ return The collision formula data .
* @ throws LionEngineException If error when reading data . */
public static CollisionFormulaConfig imports ( Media config ) { } }
|
final Xml root = new Xml ( config ) ; final Map < String , CollisionFormula > collisions = new HashMap < > ( 0 ) ; for ( final Xml node : root . getChildren ( NODE_FORMULA ) ) { final String name = node . readString ( ATT_NAME ) ; final CollisionFormula collision = createCollision ( node ) ; collisions . put ( name , collision ) ; } return new CollisionFormulaConfig ( collisions ) ;
|
public class ByExampleUtil {
    /**
     * Constructs a join predicate on collection attributes (e.g. many-to-many, List).
     * Only LIST-typed plural attributes with a non-empty example value contribute a
     * predicate; AND semantics create one join per value, OR semantics a single join.
     */
    public <T> List<Predicate> byExampleOnXToMany(ManagedType<T> mt, Root<T> mtPath, T mtValue, SearchParameters sp, CriteriaBuilder builder) {
        List<Predicate> predicates = newArrayList();
        for (PluralAttribute<? super T, ?, ?> pa : mt.getPluralAttributes()) {
            // Only List-backed collections are handled here.
            if (pa.getCollectionType() == PluralAttribute.CollectionType.LIST) {
                List<?> values = (List<?>) jpaUtil.getValue(mtValue, mt.getAttribute(pa.getName()));
                if (values != null && !values.isEmpty()) {
                    if (sp.getUseAndInXToMany()) {
                        if (values.size() > 3) {
                            log.warning("Please note that using AND restriction on an Many to Many relationship requires as many joins as values");
                        }
                        // AND semantics: one join per expected value.
                        for (Object value : values) {
                            ListJoin<T, ?> join = mtPath.join(mt.getList(pa.getName()));
                            predicates.add(join.in(value));
                        }
                    } else {
                        // OR semantics: single join matching any of the values.
                        ListJoin<T, ?> join = mtPath.join(mt.getList(pa.getName()));
                        predicates.add(join.in(values));
                    }
                }
            }
        }
        return predicates;
    }
}
|
public class DB {
    /**
     * Adds a term at the end of the current terms if it does not already exist.
     *
     * @param postId the post id
     * @param taxonomyTerm the taxonomy term to add
     * @return whether the term was added (false if a term with the same name exists)
     * @throws SQLException on database error
     */
    public boolean addPostTerm(final long postId, final TaxonomyTerm taxonomyTerm) throws SQLException {
        // Duplicate check by term name within the same taxonomy.
        List<TaxonomyTerm> currTerms = selectPostTerms(postId, taxonomyTerm.taxonomy);
        for (TaxonomyTerm currTerm : currTerms) {
            if (currTerm.term.name.equals(taxonomyTerm.term.name)) {
                return false;
            }
        }
        Connection conn = null;
        PreparedStatement stmt = null;
        // Metrics timer covers only the insert, not the duplicate check above.
        Timer.Context ctx = metrics.postTermsSetTimer.time();
        try {
            conn = connectionSupplier.getConnection();
            stmt = conn.prepareStatement(insertPostTermSQL);
            stmt.setLong(1, postId);
            stmt.setLong(2, taxonomyTerm.id);
            // Position index = current count: add at the last position...
            stmt.setInt(3, currTerms.size());
            return stmt.executeUpdate() > 0;
        } finally {
            ctx.stop();
            SQLUtil.closeQuietly(conn, stmt);
        }
    }
}
|
public class EJBMDOrchestrator {
    /**
     * Builds an automatic timer definition from its deployment-descriptor (XML) form.
     * Copies each schedule field that is present into a ScheduleExpression; absent
     * fields keep the ScheduleExpression defaults. F743-506
     */
    private static TimerMethodData.AutomaticTimer processAutomaticTimerFromXML(com.ibm.ws.javaee.dd.ejb.Timer timer) {
        TimerSchedule timerSchedule = timer.getSchedule();
        // Persistent defaults to true when the descriptor does not set it explicitly.
        boolean persistent = !timer.isSetPersistent() || timer.isPersistent();
        ScheduleExpression schedule = new ScheduleExpression();
        String year = timerSchedule.getYear();
        if (year != null) {
            schedule.year(year);
        }
        String month = timerSchedule.getMonth();
        if (month != null) {
            schedule.month(month);
        }
        String dayOfMonth = timerSchedule.getDayOfMonth();
        if (dayOfMonth != null) {
            schedule.dayOfMonth(dayOfMonth);
        }
        String dayOfWeek = timerSchedule.getDayOfWeek();
        if (dayOfWeek != null) {
            schedule.dayOfWeek(dayOfWeek);
        }
        String hour = timerSchedule.getHour();
        if (hour != null) {
            schedule.hour(hour);
        }
        String minute = timerSchedule.getMinute();
        if (minute != null) {
            schedule.minute(minute);
        }
        String second = timerSchedule.getSecond();
        if (second != null) {
            schedule.second(second);
        }
        schedule.timezone(timer.getTimezone());
        String start = timer.getStart();
        String end = timer.getEnd();
        Serializable info = timer.getInfo();
        // First argument 'true' marks this as an XML-defined (vs annotation) timer
        // — presumably; TODO confirm against TimerMethodData.AutomaticTimer.
        return new TimerMethodData.AutomaticTimer(true, persistent, schedule, start, end, info); // F743-506CodRev
    }
}
|
public class TypeAnalysis { /** * Get the set of exceptions that can be thrown on given edge . This should
* only be called after the analysis completes .
* @ param edge
* the Edge
* @ return the ExceptionSet */
public ExceptionSet getEdgeExceptionSet ( Edge edge ) { } }
|
CachedExceptionSet cachedExceptionSet = thrownExceptionSetMap . get ( edge . getSource ( ) ) ; return cachedExceptionSet . getEdgeExceptionSet ( edge ) ;
|
public class MethodHandleConstant { /** * Writes the contents of the pool entry . */
@ Override void write ( ByteCodeWriter out ) throws IOException { } }
|
out . write ( ConstantPool . CP_METHODHANDLE ) ; out . write ( _type . getCode ( ) ) ; out . writeShort ( _entry . getIndex ( ) ) ;
|
public class Utils {
    /**
     * Tests whether a string is "empty": null, zero-length, or a single NUL
     * character ({@code '\u005Cu0000'}).
     *
     * @param str the string to test; may be null
     * @return true if the string is considered empty
     */
    public static boolean isEmpty(final String str) {
        if (str == null) {
            return true;
        }
        switch (str.length()) {
            case 0:
                return true;
            case 1:
                // A lone NUL character also counts as empty.
                return str.charAt(0) == '\u0000';
            default:
                return false;
        }
    }
}
|
public class RunScriptProcess { /** * Get the main record for this screen .
* @ return The main record ( or null if none ) . */
public Record getMainRecord ( ) { } }
|
Record record = super . getMainRecord ( ) ; if ( record == null ) record = new Script ( this ) ; return record ;
|
public class CommandLine { /** * Delegates to { @ link # call ( Class , IFactory , PrintStream , PrintStream , Help . Ansi , String . . . ) } with
* { @ code System . err } for diagnostic error messages , and { @ link Help . Ansi # AUTO } .
* @ param callableClass class of the command to call when { @ linkplain # parseArgs ( String . . . ) parsing } succeeds .
* @ param factory the factory responsible for instantiating the specified callable class and potentially injecting other components
* @ param out the printStream to print the usage help message to when the user requested help
* @ param args the command line arguments to parse
* @ param < C > the annotated class must implement Callable
* @ param < T > the return type of the most specific command ( must implement { @ code Callable } )
* @ throws InitializationException if the specified class cannot be instantiated by the factory , or does not have a { @ link Command } , { @ link Option } or { @ link Parameters } annotation
* @ throws ExecutionException if the Callable throws an exception
* @ return { @ code null } if an error occurred while parsing the command line options , or if help was requested and printed . Otherwise returns the result of calling the Callable
* @ deprecated use { @ link # execute ( String . . . ) } instead
* @ since 3.2 */
@ Deprecated public static < C extends Callable < T > , T > T call ( Class < C > callableClass , IFactory factory , PrintStream out , String ... args ) { } }
|
return call ( callableClass , factory , out , System . err , Help . Ansi . AUTO , args ) ;
|
public class FlatteningDeserializer { /** * Gets a module wrapping this serializer as an adapter for the Jackson
* ObjectMapper .
* @ param mapper the object mapper for default deserializations
* @ return a simple module to be plugged onto Jackson ObjectMapper . */
public static SimpleModule getModule ( final ObjectMapper mapper ) { } }
|
SimpleModule module = new SimpleModule ( ) ; module . setDeserializerModifier ( new BeanDeserializerModifier ( ) { @ Override public JsonDeserializer < ? > modifyDeserializer ( DeserializationConfig config , BeanDescription beanDesc , JsonDeserializer < ? > deserializer ) { if ( beanDesc . getBeanClass ( ) . getAnnotation ( JsonFlatten . class ) != null ) { return new FlatteningDeserializer ( beanDesc . getBeanClass ( ) , deserializer , mapper ) ; } return deserializer ; } } ) ; return module ;
|
public class MethodNode { /** * The type descriptor for a method node is a string containing the name of the method , its return type ,
* and its parameter types in a canonical form . For simplicity , I ' m using the format of a Java declaration
* without parameter names .
* @ return the type descriptor */
public String getTypeDescriptor ( ) { } }
|
if ( typeDescriptor == null ) { StringBuilder buf = new StringBuilder ( name . length ( ) + parameters . length * 10 ) ; buf . append ( returnType . getName ( ) ) ; buf . append ( ' ' ) ; buf . append ( name ) ; buf . append ( '(' ) ; for ( int i = 0 ; i < parameters . length ; i ++ ) { if ( i > 0 ) { buf . append ( ", " ) ; } Parameter param = parameters [ i ] ; buf . append ( formatTypeName ( param . getType ( ) ) ) ; } buf . append ( ')' ) ; typeDescriptor = buf . toString ( ) ; } return typeDescriptor ;
|
public class AlertPolicyServiceClient { /** * Lists the existing alerting policies for the project .
* < p > Sample code :
* < pre > < code >
* try ( AlertPolicyServiceClient alertPolicyServiceClient = AlertPolicyServiceClient . create ( ) ) {
* ProjectName name = ProjectName . of ( " [ PROJECT ] " ) ;
* for ( AlertPolicy element : alertPolicyServiceClient . listAlertPolicies ( name ) . iterateAll ( ) ) {
* / / doThingsWith ( element ) ;
* < / code > < / pre >
* @ param name The project whose alert policies are to be listed . The format is
* < p > projects / [ PROJECT _ ID ]
* < p > Note that this field names the parent container in which the alerting policies to be
* listed are stored . To retrieve a single alerting policy by name , use the
* [ GetAlertPolicy ] [ google . monitoring . v3 . AlertPolicyService . GetAlertPolicy ] operation ,
* instead .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final ListAlertPoliciesPagedResponse listAlertPolicies ( ProjectName name ) { } }
|
ListAlertPoliciesRequest request = ListAlertPoliciesRequest . newBuilder ( ) . setName ( name == null ? null : name . toString ( ) ) . build ( ) ; return listAlertPolicies ( request ) ;
|
public class AWSIotClient { /** * Deletes a registered CA certificate .
* @ param deleteCACertificateRequest
* Input for the DeleteCACertificate operation .
* @ return Result of the DeleteCACertificate operation returned by the service .
* @ throws InvalidRequestException
* The request is not valid .
* @ throws CertificateStateException
* The certificate operation is not allowed .
* @ throws ThrottlingException
* The rate exceeds the limit .
* @ throws UnauthorizedException
* You are not authorized to perform this operation .
* @ throws ServiceUnavailableException
* The service is temporarily unavailable .
* @ throws InternalFailureException
* An unexpected error has occurred .
* @ throws ResourceNotFoundException
* The specified resource does not exist .
* @ sample AWSIot . DeleteCACertificate */
@ Override public DeleteCACertificateResult deleteCACertificate ( DeleteCACertificateRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeDeleteCACertificate ( request ) ;
|
public class LittleEndianDataOutputStream { /** * Writes a string to the underlying output stream as a sequence of
* characters . Each character is written to the data output stream as
* if by the { @ code writeChar } method .
* @ param pString a { @ code String } value to be written .
* @ throws IOException if the underlying stream throws an IOException .
* @ see # writeChar ( int )
* @ see # out */
public void writeChars ( String pString ) throws IOException { } }
|
int length = pString . length ( ) ; for ( int i = 0 ; i < length ; i ++ ) { int c = pString . charAt ( i ) ; out . write ( c & 0xFF ) ; out . write ( ( c >>> 8 ) & 0xFF ) ; } bytesWritten += length * 2 ;
|
public class HMM {
    /**
     * Returns the most likely state sequence given the observation sequence by the
     * Viterbi algorithm, which maximizes the probability of P(I | O, HMM). In the
     * calculation we may get ties; in this case one of them is chosen (here: the
     * first maximum found in the forward pass, the last in the final selection).
     *
     * @param o an observation sequence (indices into the emission matrix columns)
     * @return the most likely state sequence
     */
    public int[] predict(int[] o) {
        // The probability (log-domain) of the most probable path ending in each state.
        double[][] trellis = new double[o.length][numStates];
        // Backtrace: psy[t][j] = best predecessor state for state j at time t.
        int[][] psy = new int[o.length][numStates];
        // The most likely state sequence.
        int[] s = new int[o.length];
        // forward: initialize with start probabilities times first emission.
        for (int i = 0; i < numStates; i++) {
            trellis[0][i] = log(pi[i]) + log(b[i][o[0]]);
            psy[0][i] = 0;
        }
        // Recurrence: best score into state j at time t over all predecessors i.
        for (int t = 1; t < o.length; t++) {
            for (int j = 0; j < numStates; j++) {
                double maxDelta = Double.NEGATIVE_INFINITY;
                int maxPsy = 0;
                for (int i = 0; i < numStates; i++) {
                    double delta = trellis[t - 1][i] + log(a[i][j]);
                    if (maxDelta < delta) {
                        maxDelta = delta;
                        maxPsy = i;
                    }
                }
                trellis[t][j] = maxDelta + log(b[j][o[t]]);
                psy[t][j] = maxPsy;
            }
        }
        // trace back: pick the best final state, then follow the backtrace pointers.
        int n = o.length - 1;
        double maxDelta = Double.NEGATIVE_INFINITY;
        for (int i = 0; i < numStates; i++) {
            if (maxDelta < trellis[n][i]) {
                maxDelta = trellis[n][i];
                s[n] = i;
            }
        }
        for (int t = n; t-- > 0; ) {
            s[t] = psy[t + 1][s[t + 1]];
        }
        return s;
    }
}
|
public class Matrix4f {
    /**
     * Apply scaling to this matrix by scaling the base axes by the given
     * <code>xyz.x</code>, <code>xyz.y</code> and <code>xyz.z</code> factors, respectively.
     * <p>
     * If <code>M</code> is <code>this</code> matrix and <code>S</code> the scaling matrix,
     * then the new matrix will be <code>M * S</code>. So when transforming a
     * vector <code>v</code> with the new matrix by using <code>M * S * v</code>,
     * the scaling will be applied first!
     *
     * @param xyz the factors of the x, y and z component, respectively
     * @return a matrix holding the result
     */
    public Matrix4f scale(Vector3fc xyz) {
        // Delegate to the component-wise overload; thisOrNew() supplies the
        // destination matrix (presumably this, or a fresh instance for
        // immutable matrices — see thisOrNew()).
        return scale(xyz.x(), xyz.y(), xyz.z(), thisOrNew());
    }
}
|
public class ApiOvhOrder {
    /**
     * Get prices and contracts information for an IP migration.
     * REST: GET /order/dedicated/server/{serviceName}/ipMigration/{duration}
     *
     * @param ip [required] The IP to move to this server
     * @param token [required] IP migration token
     * @param serviceName [required] The internal name of your dedicated server
     * @param duration [required] Duration
     * @return the order information returned by the API
     * @throws IOException if the HTTP call or the response conversion fails
     */
    public OvhOrder dedicated_server_serviceName_ipMigration_duration_GET(String serviceName, String duration, String ip, String token) throws IOException {
        String qPath = "/order/dedicated/server/{serviceName}/ipMigration/{duration}";
        // Substitute the path placeholders, then append the query parameters.
        StringBuilder sb = path(qPath, serviceName, duration);
        query(sb, "ip", ip);
        query(sb, "token", token);
        // GET request: no body payload.
        String resp = exec(qPath, "GET", sb.toString(), null);
        return convertTo(resp, OvhOrder.class);
    }
}
|
public class EquationsBFGS {
    /**
     * BFGS inverse Hessian update equation that orders the multiplications to
     * minimize the number of operations. Computes, in place,
     * <pre>H' = H - p*(H*y)*s' - p*s*(y'*H) + (p^2*alpha + p)*s*s'</pre>
     * where {@code p = 1/(s'y)} and {@code alpha = y'*H*y}.
     *
     * @param H symmetric inverse matrix being updated (modified in place)
     * @param s change in state
     * @param y change in gradient
     * @param tempV0 Storage vector of length N
     * @param tempV1 Storage vector of length N
     */
    public static void inverseUpdate(DMatrixRMaj H, DMatrixRMaj s, DMatrixRMaj y, DMatrixRMaj tempV0, DMatrixRMaj tempV1) {
        double alpha = VectorVectorMult_DDRM.innerProdA(y, H, y); // alpha = y' * H * y
        double p = 1.0 / VectorVectorMult_DDRM.innerProd(s, y);   // p = 1 / (s' * y)
        CommonOps_DDRM.mult(H, y, tempV0);        // tempV0 = H * y
        CommonOps_DDRM.multTransA(y, H, tempV1);  // tempV1 = y' * H
        VectorVectorMult_DDRM.rank1Update(-p, H, tempV0, s);  // H -= p * (H*y) * s'
        VectorVectorMult_DDRM.rank1Update(-p, H, s, tempV1);  // H -= p * s * (y'*H)
        VectorVectorMult_DDRM.rank1Update(p * alpha * p + p, H, s, s); // H += (p^2*alpha + p) * s * s'
    }
}
|
public class MigrationUtils { /** * Skips bytes corresponding to serialized states . In flink 1.6 + the states are no longer kept in state . */
static void skipSerializedStates ( DataInputView in ) throws IOException { } }
|
TypeSerializer < String > nameSerializer = StringSerializer . INSTANCE ; TypeSerializer < State . StateType > stateTypeSerializer = new EnumSerializer < > ( State . StateType . class ) ; TypeSerializer < StateTransitionAction > actionSerializer = new EnumSerializer < > ( StateTransitionAction . class ) ; final int noOfStates = in . readInt ( ) ; for ( int i = 0 ; i < noOfStates ; i ++ ) { nameSerializer . deserialize ( in ) ; stateTypeSerializer . deserialize ( in ) ; } for ( int i = 0 ; i < noOfStates ; i ++ ) { String srcName = nameSerializer . deserialize ( in ) ; int noOfTransitions = in . readInt ( ) ; for ( int j = 0 ; j < noOfTransitions ; j ++ ) { String src = nameSerializer . deserialize ( in ) ; Preconditions . checkState ( src . equals ( srcName ) , "Source Edge names do not match (" + srcName + " - " + src + ")." ) ; nameSerializer . deserialize ( in ) ; actionSerializer . deserialize ( in ) ; try { skipCondition ( in ) ; } catch ( ClassNotFoundException e ) { e . printStackTrace ( ) ; } } }
|
public class Positions { /** * Positions the owner to the top inside its parent . < br >
* Respects the parent padding .
* @ param spacing the spacing
* @ return the int supplier */
public static IntSupplier topAligned ( IChild < ? > owner , int spacing ) { } }
|
return ( ) -> { return Padding . of ( owner . getParent ( ) ) . top ( ) + spacing ; } ;
|
public class BatchImportFindingsResult { /** * The list of the findings that cannot be imported .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setFailedFindings ( java . util . Collection ) } or { @ link # withFailedFindings ( java . util . Collection ) } if you want
* to override the existing values .
* @ param failedFindings
* The list of the findings that cannot be imported .
* @ return Returns a reference to this object so that method calls can be chained together . */
public BatchImportFindingsResult withFailedFindings ( ImportFindingsError ... failedFindings ) { } }
|
if ( this . failedFindings == null ) { setFailedFindings ( new java . util . ArrayList < ImportFindingsError > ( failedFindings . length ) ) ; } for ( ImportFindingsError ele : failedFindings ) { this . failedFindings . add ( ele ) ; } return this ;
|
public class WxPayApiConfig {
    /**
     * Builds the parameter map for querying a WeChat Pay signing (entrustment)
     * contract. If a plan id is configured the contract is identified by
     * (plan_id, contract_code); otherwise by contract_id. The map is signed
     * with the partner key before being returned.
     *
     * @return the query-contract parameter map, including its "sign" entry
     */
    public Map<String, String> querycontractBuild() {
        Map<String, String> map = new HashMap<String, String>();
        map.put("appid", getAppId());
        map.put("mch_id", getMchId());
        if (StrKit.notBlank(getPlanId())) {
            // Plan-based contract: identified by plan id + contract code.
            map.put("plan_id", getPlanId());
            map.put("contract_code", getContractCode());
        } else {
            // Otherwise the contract is looked up directly by its id.
            map.put("contract_id", getContractId());
        }
        map.put("version", getVersion());
        // The signature must be computed last, over all parameters above.
        map.put("sign", PaymentKit.createSign(map, getPaternerKey()));
        return map;
    }
}
|
public class ExecutionEnvironment {
    /**
     * Creates a {@link DataSet} that represents the primitive type produced by reading the
     * given file line wise. This method is similar to {@link #readCsvFile(String)} with a
     * single field, but it produces a DataSet not through
     * {@link org.apache.flink.api.java.tuple.Tuple1}.
     *
     * @param filePath The path of the file, as a URI (e.g., "file:///some/local/file" or
     *                 "hdfs://host:port/file/path"). Must not be null.
     * @param typeClass The primitive type class to be read.
     * @return A {@link DataSet} that represents the data read from the given file as primitive type.
     */
    public <X> DataSource<X> readFileOfPrimitives(String filePath, Class<X> typeClass) {
        Preconditions.checkNotNull(filePath, "The file path may not be null.");
        // One primitive value per line; no tuple wrapping.
        return new DataSource<>(this, new PrimitiveInputFormat<>(new Path(filePath), typeClass), TypeExtractor.getForClass(typeClass), Utils.getCallLocationName());
    }
}
|
public class MetadataFinder { /** * Get the set of metadata providers that can offer metadata for tracks loaded from the specified media .
* @ param sourceMedia the media whose metadata providers are desired , or { @ code null } to get the set of
* metadata providers that can offer metadata for all media .
* @ return any registered metadata providers that reported themselves as supporting tracks from that media */
public Set < MetadataProvider > getMetadataProviders ( MediaDetails sourceMedia ) { } }
|
String key = ( sourceMedia == null ) ? "" : sourceMedia . hashKey ( ) ; Set < MetadataProvider > result = metadataProviders . get ( key ) ; if ( result == null ) { return Collections . emptySet ( ) ; } return Collections . unmodifiableSet ( new HashSet < MetadataProvider > ( result ) ) ;
|
public class DateFormatter { /** * Parse some text into a { @ link Date } , according to RFC6265
* @ param txt text to parse
* @ param start the start index inside { @ code txt }
* @ param end the end index inside { @ code txt }
* @ return a { @ link Date } , or null if text couldn ' t be parsed */
public static Date parseHttpDate ( CharSequence txt , int start , int end ) { } }
|
int length = end - start ; if ( length == 0 ) { return null ; } else if ( length < 0 ) { throw new IllegalArgumentException ( "Can't have end < start" ) ; } else if ( length > 64 ) { throw new IllegalArgumentException ( "Can't parse more than 64 chars," + "looks like a user error or a malformed header" ) ; } return formatter ( ) . parse0 ( checkNotNull ( txt , "txt" ) , start , end ) ;
|
public class ServicePropertiesUtils { /** * Returns the subset of properties that start with the prefix . The returned
* dictionary will have as keys the original key without the prefix .
* @ param serviceReference service reference ; cannot be null
* @ param prefix property keys prefix ; cannot be null
* @ return subset of properties or null if there is no property that starts
* with expected prefix */
public static Map < String , Object > getSubsetStartingWith ( final ServiceReference < ? > serviceReference , final String prefix ) { } }
|
final Map < String , Object > subset = new HashMap < > ( ) ; for ( String key : serviceReference . getPropertyKeys ( ) ) { if ( key != null && key . startsWith ( prefix ) && key . trim ( ) . length ( ) > prefix . length ( ) ) { subset . put ( key . substring ( prefix . length ( ) ) , serviceReference . getProperty ( key ) ) ; } } if ( subset . isEmpty ( ) ) { return null ; } return subset ;
|
public class Symm {
    /**
     * Decrypt a password, skipping the leading Symm.ENC marker if present.
     * The AES-decrypted payload embeds a small randomized prefix and a partial
     * timestamp, except for base64 flavors which carry the raw bytes verbatim.
     *
     * @param password the (possibly ENC-prefixed) encrypted password text
     * @param os stream receiving the decrypted password bytes
     * @return a timestamp reconstructed from the payload, or 0L for base64 flavors
     * @throws IOException if decryption or stream I/O fails
     */
    public long depass(final String password, final OutputStream os) throws IOException {
        // Skip the marker prefix when present.
        // NOTE(review): assumes ENC is exactly 4 characters — confirm against its definition.
        int offset = password.startsWith(ENC) ? 4 : 0;
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        final ByteArrayInputStream bais = new ByteArrayInputStream(password.getBytes(), offset, password.length() - offset);
        // Decode and AES-decrypt the input into baos.
        exec(new AESExec() {
            @Override
            public void exec(AES aes) throws IOException {
                CipherOutputStream cos = aes.outputStream(baos, false);
                decode(bais, cos);
                cos.close(); // flush
            }
        });
        byte[] bytes = baos.toByteArray();
        DataInputStream dis = new DataInputStream(new ByteArrayInputStream(bytes));
        long time;
        if (this.getClass().getSimpleName().startsWith("base64")) {
            // base64 flavors don't expose randomization: emit raw bytes, no timestamp.
            os.write(bytes);
            time = 0L;
        } else {
            // Derive the randomized-prefix length (0..6) from the first three bytes.
            int start = 0;
            for (int i = 0; i < 3; ++i) {
                start += Math.abs(dis.readByte());
            }
            start %= 0x7;
            // Skip the randomized prefix bytes.
            for (int i = 0; i < start; ++i) {
                dis.readByte();
            }
            // Low 16 bits come from the payload; high bits from the current clock.
            time = (dis.readInt() & 0xFFFF) | (System.currentTimeMillis() & 0xFFFF0000);
            int minlength = dis.readByte();
            if (minlength < 0x9) {
                // Short payload: bytes alternate (discard, data) — copy every second byte.
                DataOutputStream dos = new DataOutputStream(os);
                for (int i = 0; i < minlength; ++i) {
                    dis.readByte();
                    dos.writeByte(dis.readByte());
                }
            } else {
                // Long payload: data follows the fixed header (3 + 4 + 1 bytes) plus the prefix.
                int pre = ((Byte.SIZE * 3 + Integer.SIZE + Byte.SIZE) / Byte.SIZE) + start;
                os.write(bytes, pre, bytes.length - pre);
            }
        }
        return time;
    }
}
|
public class WhiteboxImpl { /** * Concatenate strings .
* @ param stringsToConcatenate the strings to concatenate
* @ return the string */
private static String concatenateStrings ( String ... stringsToConcatenate ) { } }
|
StringBuilder builder = new StringBuilder ( ) ; final int stringsLength = stringsToConcatenate . length ; for ( int i = 0 ; i < stringsLength ; i ++ ) { if ( i == stringsLength - 1 && stringsLength != 1 ) { builder . append ( " or " ) ; } else if ( i != 0 ) { builder . append ( ", " ) ; } builder . append ( stringsToConcatenate [ i ] ) ; } return builder . toString ( ) ;
|
public class LazyField {
    /**
     * Sets the value of this lazy field and returns the previously held value.
     * LazyField is not thread-safe for write access; synchronization is needed
     * under concurrent read/write situations.
     *
     * @param value the new message value
     * @return the value held before this call
     */
    public MessageLite setValue(MessageLite value) {
        MessageLite originalValue = this.value;
        this.value = value;
        // The cached serialized form no longer matches the new value.
        bytes = null;
        // Mark dirty so the bytes are regenerated on demand.
        isDirty = true;
        return originalValue;
    }
}
|
public class ModuleUtils {
    /**
     * Attempts to convert a module class name to an instantiated module by applying
     * heuristics to construct it.
     * <p>
     * It first tries to instantiate the provided class itself as a module, if possible.
     * If it is not a module, it looks for an inner class called "Module",
     * "FromParametersModule", or "FromParamsModule" which is a {@link Module}.
     * When instantiating a module, it tries to find a constructor taking parameters and
     * an annotation (if an annotation is present), just an annotation, just parameters,
     * or zero arguments.
     *
     * @param parameters parameters passed to the module's constructor when supported
     * @param clazz the class named in configuration; a Module or a class with a Module inner class
     * @param annotation optional annotation passed to the module's constructor when supported
     * @return the instantiated module
     * @throws RuntimeException if no usable module class can be found, or if a
     *         name-matching inner class exists but is not a Module
     */
    @Nonnull
    // it's reflection, can't avoid unchecked cast
    @SuppressWarnings("unchecked")
    public static Module classNameToModule(final Parameters parameters, final Class<?> clazz, Optional<? extends Class<? extends Annotation>> annotation) throws IllegalAccessException, InvocationTargetException, InstantiationException {
        if (Module.class.isAssignableFrom(clazz)) {
            // The provided class is itself a Module: instantiate it directly.
            return instantiateModule((Class<? extends Module>) clazz, parameters, annotation);
        } else {
            // To abbreviate the names of modules in param files, if a class name is provided which
            // is not a Module, we check if there is an inner class named Module which is a Module.
            for (final String fallbackInnerClassName : FALLBACK_INNER_CLASS_NAMES) {
                final String fullyQualifiedName = clazz.getName() + "$" + fallbackInnerClassName;
                final Class<? extends Module> innerModuleClazz;
                try {
                    innerModuleClazz = (Class<? extends Module>) Class.forName(fullyQualifiedName);
                } catch (ClassNotFoundException cnfe) {
                    // This candidate inner class doesn't exist; try the next name.
                    continue;
                }
                if (Module.class.isAssignableFrom(innerModuleClazz)) {
                    return instantiateModule(innerModuleClazz, parameters, annotation);
                } else {
                    // A name-matching inner class exists but is not a Module: treat this
                    // as a configuration error rather than trying further names.
                    throw new RuntimeException(clazz.getName() + " is not a module; " + fullyQualifiedName + " exists but is not a module");
                }
            }
            // If we got here, we didn't find any module.
            throw new RuntimeException("Could not find inner class of " + clazz.getName() + " matching any of " + FALLBACK_INNER_CLASS_NAMES);
        }
    }
}
|
public class TreeIndexHeader {
    /**
     * Writes this header to the specified file. After the superclass header,
     * writes the integer values of {@link #dirCapacity}, {@link #leafCapacity},
     * {@link #dirMinimum}, {@link #leafMinimum}, {@link #emptyPagesSize} and
     * the largest page id.
     *
     * @param file the file to write to
     * @throws IOException if an I/O error occurs while writing
     */
    @Override
    public void writeHeader(RandomAccessFile file) throws IOException {
        super.writeHeader(file);
        // Field order below is the on-disk layout — presumably mirrored by the
        // corresponding read method; keep them in sync.
        file.writeInt(this.dirCapacity);
        file.writeInt(this.leafCapacity);
        file.writeInt(this.dirMinimum);
        file.writeInt(this.leafMinimum);
        file.writeInt(this.emptyPagesSize);
        file.writeInt(this.largestPageID);
    }
}
|
public class TasksImpl {
    /**
     * Lists all of the tasks that are associated with the specified job.
     * For multi-instance tasks, information such as affinityId, executionInfo and nodeInfo
     * refer to the primary task. Use the list subtasks API to retrieve information about subtasks.
     *
     * @param jobId The ID of the job.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;CloudTask&gt; object, emitting one page at a time
     */
    public Observable<ServiceResponseWithHeaders<Page<CloudTask>, TaskListHeaders>> listWithServiceResponseAsync(final String jobId) {
        // Fetch the first page, then recursively concatenate the remaining pages.
        return listSinglePageAsync(jobId).concatMap(new Func1<ServiceResponseWithHeaders<Page<CloudTask>, TaskListHeaders>, Observable<ServiceResponseWithHeaders<Page<CloudTask>, TaskListHeaders>>>() {
            @Override
            public Observable<ServiceResponseWithHeaders<Page<CloudTask>, TaskListHeaders>> call(ServiceResponseWithHeaders<Page<CloudTask>, TaskListHeaders> page) {
                String nextPageLink = page.body().nextPageLink();
                if (nextPageLink == null) {
                    // Last page: stop paging.
                    return Observable.just(page);
                }
                // Emit this page, then lazily fetch and append the rest.
                return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink, null));
            }
        });
    }
}
|
public class AsmUtils {
    /**
     * Changes the access level for the specified field of a class.
     * Delegates to the four-argument overload, reusing the field name for both
     * name arguments and passing {@code false} for the final flag.
     *
     * @param clazz the clazz
     * @param fieldName the field name
     * @return the field
     */
    public static Field changeFieldAccess(Class<?> clazz, String fieldName) {
        // NOTE(review): the overload's extra arguments appear to be a secondary
        // name and a boolean option — confirm their semantics against the overload.
        return changeFieldAccess(clazz, fieldName, fieldName, false);
    }
}
|
public class PrePopulatedValidationSupport { /** * Add a new CodeSystem resource which will be available to the validator . Note that
* { @ link CodeSystem # getUrl ( ) the URL field ) in this resource must contain a value as this
* value will be used as the logical URL .
* Note that if the URL is a canonical FHIR URL ( e . g . http : / / hl7 . org / StructureDefinition / Extension ) ,
* it will be stored in three ways :
* < ul >
* < li > Extension < / li >
* < li > StructureDefinition / Extension < / li >
* < li > http : / / hl7 . org / StructureDefinition / Extension < / li >
* < / ul > */
public void addCodeSystem ( CodeSystem theCodeSystem ) { } }
|
Validate . notBlank ( theCodeSystem . getUrl ( ) , "theCodeSystem.getUrl() must not return a value" ) ; addToMap ( theCodeSystem , myCodeSystems , theCodeSystem . getUrl ( ) ) ;
|
public class WeakBB {
    /**
     * Verify a weak Boneh-Boyen (WBB) signature for a certain message.
     * Checks that the pairing of (pk + m*G2) with the signature reduces to the
     * generator pairing value genGT.
     *
     * @param pk Public key (G2 element)
     * @param sig Signature (G1 element)
     * @param m Message
     * @return True iff valid
     */
    public static boolean weakBBVerify(ECP2 pk, ECP sig, BIG m) {
        ECP2 p = new ECP2();
        p.copy(pk);
        p.add(IdemixUtils.genG2.mul(m)); // p = pk + m * G2
        p.affine(); // normalize to affine coordinates before pairing
        // Valid iff e(p, sig), after final exponentiation, equals genGT.
        return PAIR.fexp(PAIR.ate(p, sig)).equals(IdemixUtils.genGT);
    }
}
|
public class SheetResourcesImpl {
    /**
     * Get a sheet as a PDF file, written to the supplied output stream.
     * Mirrors the Smartsheet REST API method: GET /sheet/{id} with an
     * "application/pdf" Accept HTTP header.
     * <p>
     * Exceptions:
     * IllegalArgumentException: if outputStream is null
     * InvalidRequestException: if there is any problem with the REST API request
     * AuthorizationException: if there is any problem with the REST API authorization (access token)
     * ResourceNotFoundException: if the resource can not be found
     * ServiceUnavailableException: if the REST API service is not available (possibly due to rate limiting)
     * SmartsheetRestException: if there is any other REST API related error
     * SmartsheetException: if there is any other error during the operation
     *
     * @param id the sheet id
     * @param outputStream the output stream to which the PDF file will be written
     * @param paperSize the optional paper size (may be null)
     * @throws SmartsheetException the smartsheet exception
     */
    public void getSheetAsPDF(long id, OutputStream outputStream, PaperSize paperSize) throws SmartsheetException {
        // Delegate to the generic file download with a PDF Accept header.
        getSheetAsFile(id, paperSize, outputStream, "application/pdf");
    }
}
|
public class MDIDesktopPane { /** * Cascade all internal frames , un - iconfying any minimized first */
public void cascadeFrames ( ) { } }
|
restoreFrames ( ) ; int x = 0 ; int y = 0 ; JInternalFrame allFrames [ ] = getAllFrames ( ) ; manager . setNormalSize ( ) ; int frameHeight = getBounds ( ) . height - 5 - allFrames . length * FRAME_OFFSET ; int frameWidth = getBounds ( ) . width - 5 - allFrames . length * FRAME_OFFSET ; for ( int i = allFrames . length - 1 ; i >= 0 ; i -- ) { allFrames [ i ] . setSize ( frameWidth , frameHeight ) ; allFrames [ i ] . setLocation ( x , y ) ; x = x + FRAME_OFFSET ; y = y + FRAME_OFFSET ; }
|
public class CmsEncoder { /** * Decodes HTML entity references like < code > & amp ; # 8364 ; < / code > that are contained in the
* String to a regular character , but only if that character is contained in the given
* encodings charset . < p >
* @ param input the input to decode the HTML entities in
* @ param encoding the charset to decode the input for
* @ return the input with the decoded HTML entities
* @ see # encodeHtmlEntities ( String , String ) */
public static String decodeHtmlEntities ( String input , String encoding ) { } }
|
Matcher matcher = ENTITIY_PATTERN . matcher ( input ) ; StringBuffer result = new StringBuffer ( input . length ( ) ) ; Charset charset = Charset . forName ( encoding ) ; CharsetEncoder encoder = charset . newEncoder ( ) ; while ( matcher . find ( ) ) { String entity = matcher . group ( ) ; String value = entity . substring ( 2 , entity . length ( ) - 1 ) ; int c = Integer . valueOf ( value ) . intValue ( ) ; if ( c < 128 ) { // first 128 chars are contained in almost every charset
entity = new String ( new char [ ] { ( char ) c } ) ; // this is intended as performance improvement since
// the canEncode ( ) operation appears quite CPU heavy
} else if ( encoder . canEncode ( ( char ) c ) ) { // encoder can encode this char
entity = new String ( new char [ ] { ( char ) c } ) ; } matcher . appendReplacement ( result , entity ) ; } matcher . appendTail ( result ) ; return result . toString ( ) ;
|
public class Gamma { /** * Regularized Upper / Complementary Incomplete Gamma Function
* Q ( s , x ) = 1 - P ( s , x ) = 1 - < i > < big > & # 8747 ; < / big > < sub > < small > 0 < / small > < / sub > < sup > < small > x < / small > < / sup > e < sup > - t < / sup > t < sup > ( s - 1 ) < / sup > dt < / i > */
public static double regularizedUpperIncompleteGamma ( double s , double x ) { } }
|
if ( s < 0.0 ) { throw new IllegalArgumentException ( "Invalid s: " + s ) ; } if ( x < 0.0 ) { throw new IllegalArgumentException ( "Invalid x: " + x ) ; } double igf = 0.0 ; if ( x != 0.0 ) { if ( x == 1.0 / 0.0 ) { igf = 1.0 ; } else { if ( x < s + 1.0 ) { // Series representation
igf = 1.0 - regularizedIncompleteGammaSeries ( s , x ) ; } else { // Continued fraction representation
igf = 1.0 - regularizedIncompleteGammaFraction ( s , x ) ; } } } return igf ;
|
public class VariablesInner {
    /**
     * Update a variable.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @param variableName The variable name.
     * @param parameters The parameters supplied to the update variable operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the VariableInner object
     */
    public Observable<VariableInner> updateAsync(String resourceGroupName, String automationAccountName, String variableName, VariableUpdateParameters parameters) {
        // Unwrap the ServiceResponse envelope, emitting only the body.
        return updateWithServiceResponseAsync(resourceGroupName, automationAccountName, variableName, parameters).map(new Func1<ServiceResponse<VariableInner>, VariableInner>() {
            @Override
            public VariableInner call(ServiceResponse<VariableInner> response) {
                return response.body();
            }
        });
    }
}
|
public class PDTWebDateHelper {
    /**
     * Create an RFC822 representation of a date.
     *
     * @param aDateTime Date to print. May be <code>null</code>.
     * @return the RFC822 string for the given date, or <code>null</code> if the
     *         parameter is <code>null</code>.
     */
    @Nullable
    public static String getAsStringRFC822(@Nullable final OffsetDateTime aDateTime) {
        if (aDateTime == null)
            return null;
        // Reuse the ZonedDateTime overload; the offset is preserved by the conversion.
        return getAsStringRFC822(aDateTime.toZonedDateTime());
    }
}
|
public class Actor {
    /**
     * Create an OAuth2 client identifier: an actor identified by the
     * user-provided client id.
     *
     * @param clientId the UAA client ID; must not be null
     * @return the created {@literal Actor}
     */
    public static Actor client(String clientId) {
        // Rejects null up front (exception type depends on the Assert implementation in use).
        Assert.notNull(clientId, "clientId must not be null");
        return new Actor(OAUTH_CLIENT, clientId);
    }
}
|
public class StorageWriter {
    /**
     * Get the index stream for the specified keyLength, creating it (and its
     * backing temp file) if needed. The per-keyLength bookkeeping arrays are
     * grown on demand.
     *
     * @param keyLength the key length the stream is indexed by
     * @return the (possibly newly created) index stream for that key length
     * @throws IOException if the temp index file cannot be opened
     */
    private DataOutputStream getIndexStream(int keyLength) throws IOException {
        // Resize arrays if necessary: this key length hasn't been seen yet.
        if (indexStreams.length <= keyLength) {
            indexStreams = Arrays.copyOf(indexStreams, keyLength + 1);
            indexFiles = Arrays.copyOf(indexFiles, keyLength + 1);
            keyCounts = Arrays.copyOf(keyCounts, keyLength + 1);
            maxOffsetLengths = Arrays.copyOf(maxOffsetLengths, keyLength + 1);
            lastValues = Arrays.copyOf(lastValues, keyLength + 1);
            lastValuesLength = Arrays.copyOf(lastValuesLength, keyLength + 1);
            dataLengths = Arrays.copyOf(dataLengths, keyLength + 1);
        }
        // Get or create the stream, backed by a temp file deleted on JVM exit.
        DataOutputStream dos = indexStreams[keyLength];
        if (dos == null) {
            File file = new File(tempFolder, "temp_index" + keyLength + ".dat");
            file.deleteOnExit();
            indexFiles[keyLength] = file;
            dos = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(file)));
            indexStreams[keyLength] = dos;
            // NOTE(review): dataLengths is bumped only on first creation —
            // presumably reserving an initial slot/offset; confirm intent.
            dataLengths[keyLength]++;
        }
        return dos;
    }
}
|
public class Http2StateUtil {
    /**
     * Creates a {@code HttpCarbonRequest} from an HttpRequest, copying channel,
     * protocol, method and address metadata into the carbon message properties.
     *
     * @param httpRequest the HTTP request message
     * @param http2SourceHandler the HTTP/2 source handler
     * @return the CarbonRequest message created from the given HttpRequest
     */
    public static HttpCarbonRequest setupCarbonRequest(HttpRequest httpRequest, Http2SourceHandler http2SourceHandler) {
        ChannelHandlerContext ctx = http2SourceHandler.getChannelHandlerContext();
        HttpCarbonRequest sourceReqCMsg = new HttpCarbonRequest(httpRequest, new DefaultListener(ctx));
        sourceReqCMsg.setProperty(POOLED_BYTE_BUFFER_FACTORY, new PooledDataStreamerFactory(ctx.alloc()));
        sourceReqCMsg.setProperty(CHNL_HNDLR_CTX, ctx);
        sourceReqCMsg.setProperty(Constants.SRC_HANDLER, http2SourceHandler);
        HttpVersion protocolVersion = httpRequest.protocolVersion();
        // Stored as "major.minor", e.g. "2.0".
        sourceReqCMsg.setProperty(HTTP_VERSION, protocolVersion.majorVersion() + "." + protocolVersion.minorVersion());
        sourceReqCMsg.setProperty(HTTP_METHOD, httpRequest.method().name());
        InetSocketAddress localAddress = null;
        // This check was added because in case of a Netty embedded channel the local
        // address can be of type 'EmbeddedSocketAddress', not an InetSocketAddress.
        if (ctx.channel().localAddress() instanceof InetSocketAddress) {
            localAddress = (InetSocketAddress) ctx.channel().localAddress();
        }
        sourceReqCMsg.setProperty(LOCAL_ADDRESS, localAddress);
        // Port is unavailable (null) when the address isn't an InetSocketAddress.
        sourceReqCMsg.setProperty(LISTENER_PORT, localAddress != null ? localAddress.getPort() : null);
        sourceReqCMsg.setProperty(LISTENER_INTERFACE_ID, http2SourceHandler.getInterfaceId());
        sourceReqCMsg.setProperty(PROTOCOL, HTTP_SCHEME);
        String uri = httpRequest.uri();
        sourceReqCMsg.setProperty(REQUEST_URL, uri);
        sourceReqCMsg.setProperty(TO, uri);
        return sourceReqCMsg;
    }
}
|
public class StopReplicationTaskRequestMarshaller {
    /**
     * Marshall the given parameter object into the protocol marshaller.
     *
     * @param stopReplicationTaskRequest the request to marshall; must not be null
     * @param protocolMarshaller the protocol marshaller receiving the bound values
     */
    public void marshall(StopReplicationTaskRequest stopReplicationTaskRequest, ProtocolMarshaller protocolMarshaller) {
        if (stopReplicationTaskRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(stopReplicationTaskRequest.getReplicationTaskArn(), REPLICATIONTASKARN_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in an SDK exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
|
public class AuditEvent { /** * Get a Map of all the initiator keys / values .
* @ return - Map of all the initiator keys / values */
public Map < String , Object > getInitiator ( ) { } }
|
TreeMap < String , Object > map = new TreeMap < String , Object > ( ) ; synchronized ( eventMap ) { for ( Entry < String , Object > entry : eventMap . entrySet ( ) ) { if ( entry . getKey ( ) . startsWith ( INITIATOR ) ) map . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } } return map ;
|
public class LRUCache {
    /**
     * Try to get the value from cache.
     * If not found, create the value by {@link MemoizeCache.ValueProvider} and put it
     * into the cache, at last return the value. The operation is completed atomically
     * (assuming the backing map is a concurrent map — see the `map` field).
     * Note: if the provider returns null, nothing is cached and null is returned.
     *
     * @param key the cache key
     * @param valueProvider provide the value if the associated value is not found
     */
    @Override
    public V getAndPut(K key, ValueProvider<? super K, ? extends V> valueProvider) {
        return map.computeIfAbsent(key, valueProvider::provide);
    }
}
|
public class CmsWorkplace { /** * Returns the default html for a workplace page , including setting of DOCTYPE and
* inserting a header with the content - type , allowing the selection of an individual style sheet . < p >
* @ param segment the HTML segment ( START / END )
* @ param title the title of the page , if null no title tag is inserted
* @ param stylesheet the used style sheet , if null the default stylesheet ' workplace . css ' is inserted
* @ return the default html for a workplace page */
public String pageHtmlStyle ( int segment , String title , String stylesheet ) { } }
|
if ( segment == HTML_START ) { StringBuffer result = new StringBuffer ( 512 ) ; result . append ( "<!DOCTYPE html>\n" ) ; result . append ( "<html>\n<head>\n" ) ; if ( title != null ) { result . append ( "<title>" ) ; result . append ( title ) ; result . append ( "</title>\n" ) ; } result . append ( "<meta HTTP-EQUIV=\"Content-Type\" CONTENT=\"text/html; charset=" ) ; result . append ( getEncoding ( ) ) ; result . append ( "\">\n" ) ; result . append ( "<link rel=\"stylesheet\" type=\"text/css\" href=\"" ) ; result . append ( getStyleUri ( getJsp ( ) , stylesheet == null ? "workplace.css" : stylesheet ) ) ; result . append ( "\">\n" ) ; return result . toString ( ) ; } else { return "</html>" ; }
|
public class SingleDataProviderContext {
    /**
     * Adds the specified data provider to the validator under construction.
     * A null provider is silently ignored.
     *
     * @param dataProvider Data provider to be added; may be null (no-op).
     * @return Context allowing further construction of the validator using the DSL.
     */
    public MultipleDataProviderContext<DPO> read(DataProvider<DPO> dataProvider) {
        if (dataProvider != null) {
            addedDataProviders.add(dataProvider);
        }
        // Change context: move to the multiple-provider DSL stage, carrying state forward.
        return new MultipleDataProviderContext<DPO>(addedTriggers, addedDataProviders);
    }
}
|
public class SheetRenderer {
    /**
     * Encodes the sheet's registered client behaviors into the widget config.
     * Default "sort" and "filter" handlers are emitted when the user defined
     * none; other events are emitted only when a behavior is registered.
     *
     * @param context the current faces context
     * @param sheet the sheet component whose behaviors are encoded
     * @param wb the widget builder receiving the config
     * @throws IOException if writing to the widget builder fails
     */
    private void encodeBehaviors(final FacesContext context, final Sheet sheet, final WidgetBuilder wb) throws IOException {
        // Note: we write out the onchange event here so we have the selected cell too.
        final Map<String, List<ClientBehavior>> behaviors = sheet.getClientBehaviors();
        wb.append(",behaviors:{");
        final String clientId = sheet.getClientId();
        // sort event (written manually since callback() prepends a leading comma)
        if (behaviors.containsKey("sort")) {
            final ClientBehaviorContext behaviorContext = ClientBehaviorContext.createClientBehaviorContext(context, sheet, "sort", sheet.getClientId(context), null);
            final AjaxBehavior ajaxBehavior = (AjaxBehavior) behaviors.get("sort").get(0);
            // Ensure the sheet itself is always among the ajax update targets.
            ajaxBehavior.setUpdate(StringUtils.defaultString(ajaxBehavior.getUpdate()) + StringUtils.SPACE + clientId);
            wb.append("sort").append(":").append("function(s, event)").append("{").append(behaviors.get("sort").get(0).getScript(behaviorContext)).append("}");
        } else {
            // default sort event if none defined by user
            wb.append("sort").append(":").append("function(s, event)").append("{").append("PrimeFaces.ab({source: '").append(clientId).append("',event: 'sort', process: '").append(clientId).append("', update: '").append(clientId).append("'});}");
        }
        // filter
        if (behaviors.containsKey("filter")) {
            final ClientBehaviorContext behaviorContext = ClientBehaviorContext.createClientBehaviorContext(context, sheet, "filter", sheet.getClientId(context), null);
            final AjaxBehavior ajaxBehavior = (AjaxBehavior) behaviors.get("filter").get(0);
            ajaxBehavior.setUpdate(StringUtils.defaultString(ajaxBehavior.getUpdate()) + StringUtils.SPACE + clientId);
            wb.callback("filter", "function(source, event)", behaviors.get("filter").get(0).getScript(behaviorContext));
        } else {
            // default filter event if none defined by user
            wb.callback("filter", "function(source, event)", "PrimeFaces.ab({s: '" + clientId + "', event: 'filter', process: '" + clientId + "', update: '" + clientId + "'});");
        }
        // Remaining events are optional: emitted only when registered by the user.
        if (behaviors.containsKey("change")) {
            final ClientBehaviorContext behaviorContext = ClientBehaviorContext.createClientBehaviorContext(context, sheet, "change", sheet.getClientId(context), null);
            wb.callback("change", "function(source, event)", behaviors.get("change").get(0).getScript(behaviorContext));
        }
        if (behaviors.containsKey("cellSelect")) {
            final ClientBehaviorContext behaviorContext = ClientBehaviorContext.createClientBehaviorContext(context, sheet, "cellSelect", sheet.getClientId(context), null);
            wb.callback("cellSelect", "function(source, event)", behaviors.get("cellSelect").get(0).getScript(behaviorContext));
        }
        if (behaviors.containsKey("columnSelect")) {
            final ClientBehaviorContext behaviorContext = ClientBehaviorContext.createClientBehaviorContext(context, sheet, "columnSelect", sheet.getClientId(context), null);
            wb.callback("columnSelect", "function(source, event)", behaviors.get("columnSelect").get(0).getScript(behaviorContext));
        }
        if (behaviors.containsKey("rowSelect")) {
            final ClientBehaviorContext behaviorContext = ClientBehaviorContext.createClientBehaviorContext(context, sheet, "rowSelect", sheet.getClientId(context), null);
            wb.callback("rowSelect", "function(source, event)", behaviors.get("rowSelect").get(0).getScript(behaviorContext));
        }
        wb.append("}");
    }
}
|
public class ModelsImpl { /** * Delete an entity role .
 * @ param appId The application ID .
 * @ param versionId The version ID .
 * @ param entityId The entity ID .
 * @ param roleId The entity role Id .
 * @ throws IllegalArgumentException thrown if parameters fail the validation
 * @ throws ErrorResponseException thrown if the request is rejected by server
 * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @ return the OperationStatus object if successful . */
public OperationStatus deleteEntityRole ( UUID appId , String versionId , UUID entityId , UUID roleId ) { } }
|
/* Delegates to the async variant, blocks for its single response, and unwraps the body. */ return deleteEntityRoleWithServiceResponseAsync ( appId , versionId , entityId , roleId ) . toBlocking ( ) . single ( ) . body ( ) ;
|
public class DataLoader { /** * Creates new DataLoader with the specified batch loader function and with the provided options
* where the batch loader function returns a list of
* { @ link org . dataloader . Try } objects .
* @ param batchLoadFunction the batch load function to use that uses { @ link org . dataloader . Try } objects
* @ param options the options to use
* @ param < K > the key type
* @ param < V > the value type
* @ return a new DataLoader
* @ see # newDataLoaderWithTry ( BatchLoader ) */
@ SuppressWarnings ( "unchecked" ) public static < K , V > DataLoader < K , V > newDataLoaderWithTry ( BatchLoader < K , Try < V > > batchLoadFunction , DataLoaderOptions options ) { } }
|
return new DataLoader < > ( ( BatchLoader < K , V > ) batchLoadFunction , options ) ;
|
public class JavaScriptCompilerMojo { /** * Checks whether or not the file is minified .
* @ param file the file to check
* @ return { @ code true } if the file is minified , { @ code false } otherwise . This method only check for the file
* extension . */
public boolean isNotMinified ( File file ) { } }
|
return ! file . getName ( ) . endsWith ( "min.js" ) && ! file . getName ( ) . endsWith ( googleClosureMinifierSuffix + ".js" ) ;
|
public class CmsContainerPageCopier { /** * Uses the custom translation table to translate formatter id . < p >
* @ param formatterId the formatter id
* @ return the formatter replacement */
private CmsUUID maybeReplaceFormatter ( CmsUUID formatterId ) { } }
|
if ( m_customReplacements != null ) { CmsUUID replacement = m_customReplacements . get ( formatterId ) ; if ( replacement != null ) { return replacement ; } } return formatterId ;
|
public class FramedGraph { /** * Get a { @ link Vertex } given its unique identifier .
* @ param < V > Framing class annotated with { @ link peapod . annotations . Vertex }
* @ param id The unique identifier of the linked vertex to locate
* @ param clazz a framing class annotated with { @ link peapod . annotations . Vertex }
* @ return the framed vertex or { @ code null } when not found */
@ SuppressWarnings ( "unchecked" ) public < V > V v ( Object id , Class < V > clazz ) { } }
|
Iterator < Vertex > tr = graph . vertices ( id ) ; return tr . hasNext ( ) ? frame ( tr . next ( ) , clazz ) : null ;
|
public class TargetQueryRenderer { /** * Prints the short form of the predicate ( by omitting the complete URI and
 * replacing it by a prefix name ) .
 * @ param uri the full URI to abbreviate
 * @ param pm the prefix manager that owns the namespace - to - prefix mappings
 * @ param insideQuotes whether the rendered name appears inside quotes
 * @ return the abbreviated ( prefixed ) form of the URI */
private static String getAbbreviatedName ( String uri , PrefixManager pm , boolean insideQuotes ) { } }
|
/* Pure delegation: abbreviation logic lives entirely in the prefix manager. */ return pm . getShortForm ( uri , insideQuotes ) ;
|
public class GCCBEZRGImpl { /** * Returns whether the given structural feature currently holds a non - default value .
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @ generated */
@ Override public boolean eIsSet ( int featureID ) { } }
|
/* XPOS / YPOS are "set" when they differ from their (possibly null) defaults; every other feature is delegated to the superclass. */ switch ( featureID ) { case AfplibPackage . GCCBEZRG__XPOS : return XPOS_EDEFAULT == null ? xpos != null : ! XPOS_EDEFAULT . equals ( xpos ) ; case AfplibPackage . GCCBEZRG__YPOS : return YPOS_EDEFAULT == null ? ypos != null : ! YPOS_EDEFAULT . equals ( ypos ) ; } return super . eIsSet ( featureID ) ;
|
public class SecureDfuImpl { /** * Sends the Execute operation code and awaits a return notification containing the status code .
 * The Execute command confirms the last chunk of data or the last command that was sent .
 * Creating the same object again , instead of executing it , allows retransmitting it in case
 * of a CRC error .
 * @ throws DfuException
 * @ throws DeviceDisconnectedException
 * @ throws UploadAbortedException
 * @ throws UnknownResponseException
 * @ throws RemoteDfuException thrown when the returned status code is not equal to
 * { @ link # DFU _ STATUS _ SUCCESS } . */
private void writeExecute ( ) throws DfuException , DeviceDisconnectedException , UploadAbortedException , UnknownResponseException , RemoteDfuException { } }
|
/* Guard: the link must still be up before issuing the op code. */ if ( ! mConnected ) throw new DeviceDisconnectedException ( "Unable to read Checksum: device disconnected" ) ; /* Write EXECUTE to the control point, then block until the response notification arrives. */ writeOpCode ( mControlPointCharacteristic , OP_CODE_EXECUTE ) ; final byte [ ] response = readNotificationResponse ( ) ; final int status = getStatusCode ( response , OP_CODE_EXECUTE_KEY ) ; /* An extended error carries a detail code in byte 3 of the response. */ if ( status == SecureDfuError . EXTENDED_ERROR ) throw new RemoteDfuExtendedErrorException ( "Executing object failed" , response [ 3 ] ) ; if ( status != DFU_STATUS_SUCCESS ) throw new RemoteDfuException ( "Executing object failed" , status ) ;
|
public class Boot { /** * Replies the identifier of the boot agent from the system ' s properties . The boot agent is launched with
* { @ link # startJanus ( Class , Object . . . ) } .
* @ return the identifier of the boot agent , or < code > null < / code > if it is unknown .
* @ since 2.0.2.0
* @ see JanusConfig # BOOT _ AGENT _ ID
* @ see # startJanus ( Class , Object . . . ) */
public static UUID getBootAgentIdentifier ( ) { } }
|
final String id = JanusConfig . getSystemProperty ( JanusConfig . BOOT_AGENT_ID ) ; if ( id != null && ! id . isEmpty ( ) ) { try { return UUID . fromString ( id ) ; } catch ( Throwable exception ) { } } return null ;
|
public class WithoutSpecification { /** * Replaces any interaction with a matched byte code element with a non - static field access on the first
 * parameter of the matched element . When matching a non - static field access or method invocation , the
 * substituted field is located on the same receiver type as the original access . For static access , the
 * first argument is used as a receiver .
 * @ param matcher A matcher for locating a field on the original interaction ' s receiver type .
 * @ return A member substitution that replaces any matched byte code element with an access of the matched field . */
public MemberSubstitution replaceWithField ( ElementMatcher < ? super FieldDescription > matcher ) { } }
|
/* Delegates to the generic replaceWith with a field-access substitution built from the matcher. */ return replaceWith ( new Substitution . ForFieldAccess . OfMatchedField ( matcher ) ) ;
|
public class SplunkDestinationDescriptionMarshaller { /** * Marshall the given parameter object into the protocol marshaller .
 * @ param splunkDestinationDescription the object to marshall ; must not be null
 * @ param protocolMarshaller the target protocol marshaller */
public void marshall ( SplunkDestinationDescription splunkDestinationDescription , ProtocolMarshaller protocolMarshaller ) { } }
|
/* Emit each field through its static binding constant; any marshalling failure is normalised to SdkClientException with the cause preserved. */ if ( splunkDestinationDescription == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( splunkDestinationDescription . getHECEndpoint ( ) , HECENDPOINT_BINDING ) ; protocolMarshaller . marshall ( splunkDestinationDescription . getHECEndpointType ( ) , HECENDPOINTTYPE_BINDING ) ; protocolMarshaller . marshall ( splunkDestinationDescription . getHECToken ( ) , HECTOKEN_BINDING ) ; protocolMarshaller . marshall ( splunkDestinationDescription . getHECAcknowledgmentTimeoutInSeconds ( ) , HECACKNOWLEDGMENTTIMEOUTINSECONDS_BINDING ) ; protocolMarshaller . marshall ( splunkDestinationDescription . getRetryOptions ( ) , RETRYOPTIONS_BINDING ) ; protocolMarshaller . marshall ( splunkDestinationDescription . getS3BackupMode ( ) , S3BACKUPMODE_BINDING ) ; protocolMarshaller . marshall ( splunkDestinationDescription . getS3DestinationDescription ( ) , S3DESTINATIONDESCRIPTION_BINDING ) ; protocolMarshaller . marshall ( splunkDestinationDescription . getProcessingConfiguration ( ) , PROCESSINGCONFIGURATION_BINDING ) ; protocolMarshaller . marshall ( splunkDestinationDescription . getCloudWatchLoggingOptions ( ) , CLOUDWATCHLOGGINGOPTIONS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class AmazonEC2Client { /** * Assigns one or more IPv6 addresses to the specified network interface . You can specify one or more specific IPv6
* addresses , or you can specify the number of IPv6 addresses to be automatically assigned from within the subnet ' s
* IPv6 CIDR block range . You can assign as many IPv6 addresses to a network interface as you can assign private
* IPv4 addresses , and the limit varies per instance type . For information , see < a
* href = " https : / / docs . aws . amazon . com / AWSEC2 / latest / UserGuide / using - eni . html # AvailableIpPerENI " > IP Addresses Per
* Network Interface Per Instance Type < / a > in the < i > Amazon Elastic Compute Cloud User Guide < / i > .
* @ param assignIpv6AddressesRequest
* @ return Result of the AssignIpv6Addresses operation returned by the service .
* @ sample AmazonEC2 . AssignIpv6Addresses
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / AssignIpv6Addresses " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public AssignIpv6AddressesResult assignIpv6Addresses ( AssignIpv6AddressesRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeAssignIpv6Addresses ( request ) ;
|
public class SessionForRequest { /** * Finds a session .
 * @ param request the Http Request
 * @ return the session if one is present in the request attributes , otherwise empty */
public static Optional < Session > find ( HttpRequest < ? > request ) { } }
|
/* Looks the session up under HttpSessionFilter.SESSION_ATTRIBUTE -- presumably placed there by that filter earlier in the chain; confirm against the filter implementation. */ return request . getAttributes ( ) . get ( HttpSessionFilter . SESSION_ATTRIBUTE , Session . class ) ;
|
public class HelperBase {
    /**
     * Returns the given name with its first character converted to upper case;
     * the remainder of the string is left untouched. The empty string is
     * returned unchanged.
     *
     * @param name the name to convert; must not be {@code null}
     * @return the name with a leading upper-case character
     */
    public String toFirstUpper(String name) {
        if (name.isEmpty()) {
            return name;
        }
        // Locale.ROOT makes the conversion locale-independent: the bare
        // toUpperCase() used previously would e.g. map 'i' to a dotted
        // capital I under the Turkish default locale.
        return name.substring(0, 1).toUpperCase(Locale.ROOT) + name.substring(1);
    }
}
|
public class ProjectApi { /** * Delete a project issue .
* < pre > < code > DELETE / projects / : id / issues / : issue _ iid < / code > < / pre >
* @ param projectIdOrPath projectIdOrPath the project in the form of an Integer ( ID ) , String ( path ) , or Project instance , required
* @ param issueId the internal ID of a project ' s issue
* @ throws GitLabApiException if any exception occurs
* @ deprecated Will be removed in version 5.0 , replaced by { @ link IssuesApi # deleteIssue ( Object , Integer ) } */
@ Deprecated public void deleteIssue ( Object projectIdOrPath , Integer issueId ) throws GitLabApiException { } }
|
Response . Status expectedStatus = ( isApiVersion ( ApiVersion . V3 ) ? Response . Status . OK : Response . Status . NO_CONTENT ) ; delete ( expectedStatus , getDefaultPerPageParam ( ) , "projects" , getProjectIdOrPath ( projectIdOrPath ) , "issues" , issueId ) ;
|
public class BaseDataSource { /** * Sets properties from a { @ link DriverManager } URL .
* @ param url properties to set */
public void setUrl ( String url ) { } }
|
Properties p = org . postgresql . Driver . parseURL ( url , null ) ; if ( p == null ) { throw new IllegalArgumentException ( "URL invalid " + url ) ; } for ( PGProperty property : PGProperty . values ( ) ) { if ( ! this . properties . containsKey ( property . getName ( ) ) ) { setProperty ( property , property . get ( p ) ) ; } }
|
public class StandardBullhornData { /** * { @ inheritDoc }
 * Delegates to the multipart - file handler with its trailing flag set to { @ code true } . */
@ Override public FileWrapper addFile ( Class < ? extends FileEntity > type , Integer entityId , MultipartFile file , String externalId , FileParams params ) { } }
|
/* NOTE(review): the meaning of the trailing boolean is not visible here -- confirm against handleAddFileWithMultipartFile before relying on it. */ return this . handleAddFileWithMultipartFile ( type , entityId , file , externalId , params , true ) ;
|
public class BucketConfigurationXmlFactory { /** * / * < LifecycleConfiguration >
* < Rule >
* < ID > logs - rule < / ID >
* < Status > Enabled < / Status >
* < Transition >
* < Days > 30 < / Days >
* < StorageClass > GLACIER < / StorageClass >
* < / Transition >
* < Expiration >
* < Days > 365 < / Days >
* < / Expiration >
* < NoncurrentVersionTransition >
* < NoncurrentDays > 7 < / NoncurrentDays >
* < StorageClass > GLACIER < / StorageClass >
* < / NoncurrentVersionTransition >
* < NoncurrentVersionExpiration >
* < NoncurrentDays > 14 < / NoncurrentDays >
* < / NoncurrentVersionExpiration >
* < Filter > < ! - - A filter can have only one of Prefix , Tag or And . - - >
* < Prefix > logs / < / Prefix >
* < Tag >
* < Key > key1 < / Key >
* < Value > value1 < / Value >
* < / Tag >
* < And >
* < Prefix > logs / < / Prefix >
* < Tag >
* < Key > key1 < / Key >
* < Value > value1 < / Value >
* < / Tag >
* < Tag >
* < Key > key1 < / Key >
* < Value > value1 < / Value >
* < / Tag >
* < / And >
* < / Filter >
* < / Rule >
* < Rule >
* < ID > image - rule < / ID >
* < Prefix > image / < / Prefix >
* < Status > Enabled < / Status >
* < Transition >
* < Date > 2012-12-31T00:00:00.000Z < / Date >
* < StorageClass > GLACIER < / StorageClass >
* < / Transition >
* < Expiration >
* < Date > 2020-12-31T00:00:00.000Z < / Date >
* < / Expiration >
* < AbortIncompleteMultipartUpload >
* < DaysAfterInitiation > 10 < / DaysAfterInitiation >
* < / AbortIncompleteMultipartUpload >
* < / Rule >
* < / LifecycleConfiguration > */
public byte [ ] convertToXmlByteArray ( BucketLifecycleConfiguration config ) throws SdkClientException { } }
|
XmlWriter xml = new XmlWriter ( ) ; xml . start ( "LifecycleConfiguration" ) ; if ( config . getRules ( ) != null ) { for ( Rule rule : config . getRules ( ) ) { writeRule ( xml , rule ) ; } } xml . end ( ) ; return xml . getBytes ( ) ;
|
public class UpdateDomainMetadataRequestMarshaller { /** * Marshall the given parameter object into the protocol marshaller .
 * @ param updateDomainMetadataRequest the request to marshall ; must not be null
 * @ param protocolMarshaller the target protocol marshaller */
public void marshall ( UpdateDomainMetadataRequest updateDomainMetadataRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
/* Emit each field through its static binding constant; any failure is normalised to SdkClientException with the cause preserved. */ if ( updateDomainMetadataRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateDomainMetadataRequest . getFleetArn ( ) , FLEETARN_BINDING ) ; protocolMarshaller . marshall ( updateDomainMetadataRequest . getDomainName ( ) , DOMAINNAME_BINDING ) ; protocolMarshaller . marshall ( updateDomainMetadataRequest . getDisplayName ( ) , DISPLAYNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class TaskTable { /** * 如果时间匹配则执行相应的Task , 带读锁
* @ param millis 时间毫秒 */
public void executeTaskIfMatch ( long millis ) { } }
|
final Lock readLock = lock . readLock ( ) ; try { readLock . lock ( ) ; executeTaskIfMatchInternal ( millis ) ; } finally { readLock . unlock ( ) ; }
|
public class ResourceBundleELResolver { /** * If the base object is an instance of ResourceBundle , the provided property will first be
* coerced to a String . The Object returned by getObject on the base ResourceBundle will be
* returned . If the base is ResourceBundle , the propertyResolved property of the ELContext
* object must be set to true by this resolver , before returning . If this property is not true
* after this method is called , the caller should ignore the return value .
* @ param context
* The context of this evaluation .
* @ param base
* The bundle to analyze . Only bases of type ResourceBundle are handled by this
* resolver .
* @ param property
* The name of the property to analyze . Will be coerced to a String .
* @ return If the propertyResolved property of ELContext was set to true , then null if property
* is null ; otherwise the Object for the given key ( property coerced to String ) from the
* ResourceBundle . If no object for the given key can be found , then the String " ? ? ? " +
* key + " ? ? ? " .
* @ throws NullPointerException
* if context is null .
* @ throws ELException
* if an exception was thrown while performing the property or variable resolution .
* The thrown exception must be included as the cause property of this exception , if
* available . */
@ Override public Object getValue ( ELContext context , Object base , Object property ) { } }
|
if ( context == null ) { throw new NullPointerException ( "context is null" ) ; } Object result = null ; if ( isResolvable ( base ) ) { if ( property != null ) { try { result = ( ( ResourceBundle ) base ) . getObject ( property . toString ( ) ) ; } catch ( MissingResourceException e ) { result = "???" + property + "???" ; } } context . setPropertyResolved ( true ) ; } return result ;
|
public class MetaClass {
    /**
     * Decodes an array encoded as a String: a comma separated list, optionally
     * enclosed in brackets or parentheses. Elements may be single- or
     * double-quoted (the quotes are stripped and commas inside them are kept
     * literal), and any character may be escaped with a backslash.
     *
     * @param encoded the String-encoded array
     * @return the decoded elements, each trimmed of surrounding whitespace
     * @throws IllegalArgumentException if an opening paren/bracket is unclosed,
     *         or the string ends with a dangling escape character
     */
    static String[] decodeArray(String encoded) { // package-private (was private) so the parser can be unit tested
        char[] chars = encoded.trim().toCharArray();
        // Fix: an empty (or all-whitespace) input previously raised
        // ArrayIndexOutOfBoundsException on chars[0]; decode it as an empty array.
        if (chars.length == 0) {
            return new String[0];
        }
        // -- Parse the String
        // (state)
        char quoteCloseChar = (char) 0; // non-zero while inside a quoted section
        List<StringBuilder> terms = new LinkedList<StringBuilder>();
        StringBuilder current = new StringBuilder();
        // (start/stop overhead: strip a single enclosing () or [] pair)
        int start = 0;
        int end = chars.length;
        if (chars[0] == '(') {
            start += 1;
            end -= 1;
            if (chars[end] != ')') throw new IllegalArgumentException("Unclosed paren in encoded array: " + encoded);
        }
        if (chars[0] == '[') {
            start += 1;
            end -= 1;
            if (chars[end] != ']') throw new IllegalArgumentException("Unclosed bracket in encoded array: " + encoded);
        }
        // (finite state automaton over the characters)
        for (int i = start; i < end; i++) {
            if (chars[i] == '\\') {
                // (case: escaped character -- copy the next character verbatim)
                if (i == chars.length - 1) throw new IllegalArgumentException("Last character of encoded pair is escape character: " + encoded);
                current.append(chars[i + 1]);
                i += 1;
            } else if (quoteCloseChar != 0) {
                // (case: inside quotes -- everything, including commas, is literal)
                if (chars[i] == quoteCloseChar) {
                    quoteCloseChar = (char) 0;
                } else {
                    current.append(chars[i]);
                }
            } else {
                // (case: normal)
                if (chars[i] == '"') {
                    quoteCloseChar = '"';
                } else if (chars[i] == '\'') {
                    quoteCloseChar = '\'';
                } else if (chars[i] == ',') {
                    // term separator
                    terms.add(current);
                    current = new StringBuilder();
                } else {
                    current.append(chars[i]);
                }
            }
        }
        // -- Return (a trailing empty term is dropped, matching the original behaviour)
        if (current.length() > 0) terms.add(current);
        String[] rtn = new String[terms.size()];
        int i = 0;
        for (StringBuilder b : terms) {
            rtn[i] = b.toString().trim();
            i += 1;
        }
        return rtn;
    }
}
|
public class StoreFactoryImpl { /** * Converts the given literal into its { @ code ServerState } enumerator .
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @ generated */
public ServerState createServerStateFromString ( EDataType eDataType , String initialValue ) { } }
|
/* ServerState.get returns null for unknown literals; surface that as an IllegalArgumentException naming the offending value and data type. */ ServerState result = ServerState . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.