signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class XMLOutputter { /** * Adds an attribute to the current element , with a < code > boolean < / code > value . There must
* currently be an open element .
* The attribute value is surrounded by the quotation mark character ( see
* { @ link # getQuotationMark ( ) } ) .
* @ param name the name of the attribute , not < code > null < / code > .
* @ param value the value of the attribute .
* @ throws IllegalStateException if < code > getState ( ) ! = { @ link # START _ TAG _ OPEN } < / code > .
* @ throws IllegalArgumentException if < code > name = = null < / code > .
* @ throws IOException if an I / O error occurs ; this will set the state to { @ link # ERROR _ STATE } . */
public final void attribute ( String name , boolean value ) throws IllegalStateException , IllegalArgumentException , IOException { } }
|
attribute ( name , value ? "true" : "false" ) ;
|
public class CoverageUtil { /** * TODO : Comment */
public float getSpanPercent ( ILexNameToken name ) { } }
|
int hits = 0 ; int misses = 0 ; ILexLocation span = null ; synchronized ( nameSpans ) { span = nameSpans . get ( name ) ; } synchronized ( allLocations ) { for ( ILexLocation l : allLocations ) { if ( l . getExecutable ( ) && l . within ( span ) ) { if ( l . getHits ( ) > 0 ) { hits ++ ; } else { misses ++ ; } } } } int sum = hits + misses ; return sum == 0 ? 0 : ( float ) ( 1000 * hits / sum ) / 10 ; // NN . N %
|
public class KNNClassifier { /** * Return the predict to every other train vector ' s distance .
* @ return return by Id which is ordered by input sequential . */
@ Override public Map < String , Double > predict ( Tuple predict ) { } }
|
KNNEngine engine = new KNNEngine ( predict , trainingData , k ) ; if ( mode == 1 ) { engine . getDistance ( engine . chebyshevDistance ) ; } else if ( mode == 2 ) { engine . getDistance ( engine . manhattanDistance ) ; } else { engine . getDistance ( engine . euclideanDistance ) ; } predict . label = engine . getResult ( ) ; Map < String , Double > outputMap = new ConcurrentHashMap < > ( ) ; trainingData . parallelStream ( ) . forEach ( x -> outputMap . put ( String . valueOf ( x . id ) , ( Double ) x . getExtra ( ) . get ( DISTANCE ) ) ) ; return outputMap ;
|
public class GenericsUtils { /** * If type is a variable , looks actual variable type , if it contains generics .
* For { @ link ParameterizedType } return actual type parameters , for simple class returns raw class generics .
* Note : returned generics may contain variables inside !
* @ param type type to get generics of
* @ param generics context generics map
* @ return type generics array or empty array */
public static Type [ ] getGenerics ( final Type type , final Map < String , Type > generics ) { } }
|
Type [ ] res = NO_TYPES ; Type analyzingType = type ; if ( type instanceof TypeVariable ) { // if type is pure generic recovering parametrization
analyzingType = declaredGeneric ( ( TypeVariable ) type , generics ) ; } if ( ( analyzingType instanceof ParameterizedType ) && ( ( ParameterizedType ) analyzingType ) . getActualTypeArguments ( ) . length > 0 ) { res = ( ( ParameterizedType ) analyzingType ) . getActualTypeArguments ( ) ; } else if ( type instanceof Class ) { // if type is class return raw declaration
final Class < ? > actual = ( Class < ? > ) analyzingType ; if ( actual . getTypeParameters ( ) . length > 0 ) { res = GenericsResolutionUtils . resolveDirectRawGenerics ( actual ) . values ( ) . toArray ( new Type [ 0 ] ) ; } } return res ;
|
public class DeletePubSubMsgsThread { /** * This get called from MessageProcessor on ME getting stopped . */
@ Override public void stopThread ( StoppableThreadCache cache ) { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "stopThread" ) ; this . hasToStop = true ; // Remove this thread from the thread cache
cache . deregisterThread ( this ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "stopThread" ) ;
|
public class CommandFaceDescriptor { /** * { @ inheritDoc } */
public void setCaption ( String shortDescription ) { } }
|
String old = this . caption ; this . caption = shortDescription ; firePropertyChange ( DescribedElement . CAPTION_PROPERTY , old , this . caption ) ;
|
public class Metric { /** * Indicate we are done ( stop the timer ) . Once you call this
* method , subsequent calls have no effect .
* @ return time in nanoseconds from the start of this metric , or - 1
* if { @ code false } was passed in the constructor */
public long done ( ) { } }
|
if ( enabled ) { if ( ! done ) { lastCheckpointNanos = System . nanoTime ( ) ; done = true ; } return lastCheckpointNanos - startNanos ; } return - 1 ;
|
public class CreateMojo { /** * Get the branch info for this revision from the repository . For svn , it is in svn info .
* @ return
* @ throws MojoExecutionException
* @ throws MojoExecutionException */
public String getScmBranch ( ) throws MojoExecutionException { } }
|
try { ScmRepository repository = getScmRepository ( ) ; ScmProvider provider = scmManager . getProviderByRepository ( repository ) ; /* git branch can be obtained directly by a command */
if ( GitScmProviderRepository . PROTOCOL_GIT . equals ( provider . getScmType ( ) ) ) { ScmFileSet fileSet = new ScmFileSet ( scmDirectory ) ; return GitBranchCommand . getCurrentBranch ( getLogger ( ) , ( GitScmProviderRepository ) repository . getProviderRepository ( ) , fileSet ) ; } else if ( provider instanceof HgScmProvider ) { /* hg branch can be obtained directly by a command */
HgOutputConsumer consumer = new HgOutputConsumer ( getLogger ( ) ) ; ScmResult result = HgUtils . execute ( consumer , logger , scmDirectory , new String [ ] { "id" , "-b" } ) ; checkResult ( result ) ; if ( StringUtils . isNotEmpty ( consumer . getOutput ( ) ) ) { return consumer . getOutput ( ) ; } } } catch ( ScmException e ) { getLog ( ) . warn ( "Cannot get the branch information from the git repository: \n" + e . getLocalizedMessage ( ) ) ; } return getScmBranchFromUrl ( ) ;
|
public class RequestUtils { /** * Compares ipAddress with one of the ip addresses listed
* @ param ipAddress ip address to compare
* @ param whitelist list of ip addresses . . . an be a range ip . . . in format 123.123.123.123/28 where last part is a subnet range
* @ return true if allowed , false if not */
public static boolean isIpAddressAllowed ( String ipAddress , String ... whitelist ) { } }
|
if ( StringUtils . isNullOrEmpty ( ipAddress ) || whitelist == null || whitelist . length == 0 ) { return false ; } for ( String address : whitelist ) { if ( ipAddress . equals ( address ) ) { return true ; } if ( address . contains ( "/" ) ) { // is range definition
SubnetUtils utils = new SubnetUtils ( address ) ; utils . setInclusiveHostCount ( true ) ; if ( utils . getInfo ( ) . isInRange ( ipAddress ) ) { return true ; } } } return false ;
|
public class AssignInstanceRequest { /** * The layer ID , which must correspond to a custom layer . You cannot assign a registered instance to a built - in
* layer .
* @ return The layer ID , which must correspond to a custom layer . You cannot assign a registered instance to a
* built - in layer . */
public java . util . List < String > getLayerIds ( ) { } }
|
if ( layerIds == null ) { layerIds = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return layerIds ;
|
public class TopKSampler { /** * Start to record samples
* @ param capacity
* Number of sample items to keep in memory , the lower this is
* the less accurate results are . For best results use value
* close to cardinality , but understand the memory trade offs . */
public synchronized void beginSampling ( int capacity ) { } }
|
if ( ! enabled ) { summary = new StreamSummary < T > ( capacity ) ; hll = new HyperLogLogPlus ( 14 ) ; enabled = true ; }
|
public class BindMapHelper { /** * Parse a map .
* @ param context the context
* @ param parserWrapper the parser wrapper
* @ param map the map
* @ return map */
public static Map < String , Object > parseMap ( AbstractContext context , ParserWrapper parserWrapper , Map < String , Object > map ) { } }
|
switch ( context . getSupportedFormat ( ) ) { case XML : throw ( new KriptonRuntimeException ( context . getSupportedFormat ( ) + " context does not support parse direct map parsing" ) ) ; default : JacksonWrapperParser wrapperParser = ( JacksonWrapperParser ) parserWrapper ; JsonParser parser = wrapperParser . jacksonParser ; map . clear ( ) ; return parseMap ( context , parser , map , false ) ; }
|
public class Connection { /** * { @ inheritDoc }
* @ throws java . sql . SQLFeatureNotSupportedException if | autoGeneratedKeys | is Statement . RETURN _ GENERATED _ KEYS
* @ see # prepareStatement ( java . lang . String ) */
public PreparedStatement prepareStatement ( final String sql , final int autoGeneratedKeys ) throws SQLException { } }
|
checkClosed ( ) ; return new acolyte . jdbc . PreparedStatement ( this , sql , autoGeneratedKeys , null , null , this . handler . getStatementHandler ( ) ) ;
|
public class CompositeTextWatcher { /** * Add a { @ link TextWatcher } with a specified Id and whether or not it is enabled by default
* @ param id the id of the { @ link TextWatcher } to add
* @ param watcher the { @ link TextWatcher } to add
* @ param enabled whether or not it is enabled by default */
public void addWatcher ( int id , TextWatcher watcher , boolean enabled ) { } }
|
mWatchers . put ( id , watcher ) ; mEnabledKeys . put ( id , enabled ) ;
|
public class BigtableTableAdminClient { /** * Constructs an instance of BigtableTableAdminClient with the given instanceName and stub . */
public static BigtableTableAdminClient create ( @ Nonnull String projectId , @ Nonnull String instanceId , @ Nonnull EnhancedBigtableTableAdminStub stub ) { } }
|
return new BigtableTableAdminClient ( projectId , instanceId , stub ) ;
|
public class TimeBasedOneTimePasswordHelper { /** * Decode base - 32 method . I didn ' t want to add a dependency to Apache Codec just for this decode method . Exposed for
* testing . */
static byte [ ] decodeBase32 ( String str ) { } }
|
// each base - 32 character encodes 5 bits
int numBytes = ( ( str . length ( ) * 5 ) + 7 ) / 8 ; byte [ ] result = new byte [ numBytes ] ; int resultIndex = 0 ; int which = 0 ; int working = 0 ; for ( int i = 0 ; i < str . length ( ) ; i ++ ) { char ch = str . charAt ( i ) ; int val ; if ( ch >= 'a' && ch <= 'z' ) { val = ch - 'a' ; } else if ( ch >= 'A' && ch <= 'Z' ) { val = ch - 'A' ; } else if ( ch >= '2' && ch <= '7' ) { val = 26 + ( ch - '2' ) ; } else if ( ch == '=' ) { // special case
which = 0 ; break ; } else { throw new IllegalArgumentException ( "Invalid base-32 character: " + ch ) ; } /* * There are probably better ways to do this but this seemed the most straightforward . */
switch ( which ) { case 0 : // all 5 bits is top 5 bits
working = ( val & 0x1F ) << 3 ; which = 1 ; break ; case 1 : // top 3 bits is lower 3 bits
working |= ( val & 0x1C ) >> 2 ; result [ resultIndex ++ ] = ( byte ) working ; // lower 2 bits is upper 2 bits
working = ( val & 0x03 ) << 6 ; which = 2 ; break ; case 2 : // all 5 bits is mid 5 bits
working |= ( val & 0x1F ) << 1 ; which = 3 ; break ; case 3 : // top 1 bit is lowest 1 bit
working |= ( val & 0x10 ) >> 4 ; result [ resultIndex ++ ] = ( byte ) working ; // lower 4 bits is top 4 bits
working = ( val & 0x0F ) << 4 ; which = 4 ; break ; case 4 : // top 4 bits is lowest 4 bits
working |= ( val & 0x1E ) >> 1 ; result [ resultIndex ++ ] = ( byte ) working ; // lower 1 bit is top 1 bit
working = ( val & 0x01 ) << 7 ; which = 5 ; break ; case 5 : // all 5 bits is mid 5 bits
working |= ( val & 0x1F ) << 2 ; which = 6 ; break ; case 6 : // top 2 bits is lowest 2 bits
working |= ( val & 0x18 ) >> 3 ; result [ resultIndex ++ ] = ( byte ) working ; // lower 3 bits of byte 6 is top 3 bits
working = ( val & 0x07 ) << 5 ; which = 7 ; break ; case 7 : // all 5 bits is lower 5 bits
working |= ( val & 0x1F ) ; result [ resultIndex ++ ] = ( byte ) working ; which = 0 ; break ; } } if ( which != 0 ) { result [ resultIndex ++ ] = ( byte ) working ; } if ( resultIndex != result . length ) { result = Arrays . copyOf ( result , resultIndex ) ; } return result ;
|
public class ClassNode { /** * Specify the class represented by this ` ClassNode ` is annotated
* by an annotation class specified by the name
* @ param name the name of the annotation class
* @ return this ` ClassNode ` instance */
public ClassNode annotatedWith ( String name ) { } }
|
ClassNode anno = infoBase . node ( name ) ; this . annotations . add ( anno ) ; anno . annotated . add ( this ) ; return this ;
|
public class JJTree { /** * Assembles the command line arguments for the invocation of JJTree according
* to the configuration . < br / >
* < br / >
* < strong > Note : < / strong > To prevent conflicts with JavaCC options that might
* be set directly in the grammar file , only those parameters that have been
* explicitly set are passed on the command line .
* @ return A string array that represents the arguments to use for JJTree . */
private String [ ] generateArguments ( ) { } }
|
final List < String > argsList = new ArrayList < > ( ) ; if ( StringUtils . isNotEmpty ( this . grammarEncoding ) ) { argsList . add ( "-GRAMMAR_ENCODING=" + this . grammarEncoding ) ; } if ( StringUtils . isNotEmpty ( this . outputEncoding ) ) { argsList . add ( "-OUTPUT_ENCODING=" + this . outputEncoding ) ; } if ( StringUtils . isNotEmpty ( jdkVersion ) ) { argsList . add ( "-JDK_VERSION=" + this . jdkVersion ) ; } if ( this . buildNodeFiles != null ) { argsList . add ( "-BUILD_NODE_FILES=" + this . buildNodeFiles ) ; } if ( this . multi != null ) { argsList . add ( "-MULTI=" + this . multi ) ; } if ( this . nodeDefaultVoid != null ) { argsList . add ( "-NODE_DEFAULT_VOID=" + this . nodeDefaultVoid ) ; } if ( StringUtils . isNotEmpty ( this . nodeClass ) ) { argsList . add ( "-NODE_CLASS=" + this . nodeClass ) ; } if ( StringUtils . isNotEmpty ( this . nodeFactory ) ) { argsList . add ( "-NODE_FACTORY=" + this . nodeFactory ) ; } if ( StringUtils . isNotEmpty ( this . nodePackage ) ) { argsList . add ( "-NODE_PACKAGE=" + this . nodePackage ) ; } if ( StringUtils . isNotEmpty ( this . nodePrefix ) ) { argsList . add ( "-NODE_PREFIX=" + this . nodePrefix ) ; } if ( this . nodeScopeHook != null ) { argsList . add ( "-NODE_SCOPE_HOOK=" + this . nodeScopeHook ) ; } if ( this . nodeUsesParser != null ) { argsList . add ( "-NODE_USES_PARSER=" + this . nodeUsesParser ) ; } if ( this . trackTokens != null ) { argsList . add ( "-TRACK_TOKENS=" + this . trackTokens ) ; } if ( this . visitor != null ) { argsList . add ( "-VISITOR=" + this . visitor ) ; } if ( StringUtils . isNotEmpty ( this . visitorDataType ) ) { argsList . add ( "-VISITOR_DATA_TYPE=" + this . visitorDataType ) ; } if ( StringUtils . isNotEmpty ( this . visitorReturnType ) ) { argsList . add ( "-VISITOR_RETURN_TYPE=" + this . visitorReturnType ) ; } if ( StringUtils . isNotEmpty ( this . visitorException ) ) { argsList . add ( "-VISITOR_EXCEPTION=" + this . visitorException ) ; } if ( this . 
outputDirectory != null ) { argsList . add ( "-OUTPUT_DIRECTORY=" + this . outputDirectory . getAbsolutePath ( ) ) ; } if ( StringUtils . isNotEmpty ( this . javaTemplateType ) ) { argsList . add ( "-JAVA_TEMPLATE_TYPE=" + this . javaTemplateType ) ; } if ( this . inputFile != null ) { argsList . add ( this . inputFile . getAbsolutePath ( ) ) ; } return argsList . toArray ( new String [ argsList . size ( ) ] ) ;
|
public class FldExporter { /** * Saves the engine as a FuzzyLite Dataset into the specified file
* @ param file is file to save the dataset into
* @ param engine is the engine to export
* @ param values is the number of values to export
* @ param scope indicates the scope of the values
* @ param activeVariables contains the input variables to generate values for .
* The input variables must be in the same order as in the engine . A value of
* fl : : null indicates the variable is not active .
* @ throws IOException if any error occurs upon writing to the file */
public void toFile ( File file , Engine engine , int values , ScopeOfValues scope , List < InputVariable > activeVariables ) throws IOException { } }
|
if ( ! file . createNewFile ( ) ) { FuzzyLite . logger ( ) . log ( Level . FINE , "Replacing file {0}" , file . getAbsolutePath ( ) ) ; } BufferedWriter writer = new BufferedWriter ( new OutputStreamWriter ( new FileOutputStream ( file ) , FuzzyLite . UTF_8 ) ) ; try { write ( engine , writer , values , scope , activeVariables ) ; } catch ( RuntimeException ex ) { throw ex ; } catch ( IOException ex ) { throw ex ; } finally { writer . close ( ) ; }
|
public class ClassInfo { /** * Add an implemented interface to this class .
* @ param interfaceName
* the interface name
* @ param classNameToClassInfo
* the map from class name to class info */
void addImplementedInterface ( final String interfaceName , final Map < String , ClassInfo > classNameToClassInfo ) { } }
|
final ClassInfo interfaceClassInfo = getOrCreateClassInfo ( interfaceName , /* classModifiers = */
Modifier . INTERFACE , classNameToClassInfo ) ; interfaceClassInfo . isInterface = true ; interfaceClassInfo . modifiers |= Modifier . INTERFACE ; this . addRelatedClass ( RelType . IMPLEMENTED_INTERFACES , interfaceClassInfo ) ; interfaceClassInfo . addRelatedClass ( RelType . CLASSES_IMPLEMENTING , this ) ;
|
public class ProfilingTimer { /** * Append the given string to the log message of the current subtask */
public void appendToLog ( String logAppendMessage ) { } }
|
ProfilingTimerNode currentNode = current . get ( ) ; if ( currentNode != null ) { currentNode . appendToLog ( logAppendMessage ) ; }
|
public class PushBroadcastReceiver { /** * Dispatch received push message to external listener .
* @ param listener Push message listener .
* @ param message Received push message to be dispatched . */
private void dispatchMessage ( PushMessageListener listener , RemoteMessage message ) { } }
|
if ( listener != null ) { mainThreadHandler . post ( ( ) -> listener . onMessageReceived ( message ) ) ; }
|
public class DefaultExceptionFactory { /** * Create an { @ link LdapException } from an { @ link ILdapResult }
* @ param result the result code
* @ return a new LDAPException */
public static LdapException create ( ILdapResult result ) { } }
|
return new LdapException ( result . getResultCode ( ) , result . getMessage ( ) , null ) ;
|
public class CollectionUtils { /** * Wrap hash set .
* @ param < T > the type parameter
* @ param source the source
* @ return the set */
public static < T > HashSet < T > wrapHashSet ( final T ... source ) { } }
|
val list = new HashSet < T > ( ) ; addToCollection ( list , source ) ; return list ;
|
public class ImageSessionFilter { /** * 取出保存的所有UploadFile集合 , 但不从Session删除这些集合
* @ param request
* @ return */
public Collection getAllUploadFile ( HttpServletRequest request ) { } }
|
Collection uploadList = null ; try { HttpSession session = request . getSession ( ) ; if ( session != null ) { uploadList = ( Collection ) session . getAttribute ( PIC_NAME_PACKAGE ) ; } } catch ( Exception ex ) { Debug . logError ( "[JdonFramework] not found the upload files in session" + ex , module ) ; } return uploadList ;
|
public class TagTypeImpl { /** * If not already created , a new < code > attribute < / code > element will be created and returned .
* Otherwise , the first existing < code > attribute < / code > element will be returned .
* @ return the instance defined for the element < code > attribute < / code > */
public TldAttributeType < TagType < T > > getOrCreateAttribute ( ) { } }
|
List < Node > nodeList = childNode . get ( "attribute" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new TldAttributeTypeImpl < TagType < T > > ( this , "attribute" , childNode , nodeList . get ( 0 ) ) ; } return createAttribute ( ) ;
|
public class VisitorState { /** * Given the binary name of a class , returns the { @ link Type } .
* < p > If this method returns null , the compiler doesn ' t have access to this type , which means that
* if you are comparing other types to this for equality or the subtype relation , your result
* would always be false even if it could create the type . Thus it might be best to bail out early
* in your matcher if this method returns null on your type of interest .
* @ param typeStr the JLS 13.1 binary name of the class , e . g . { @ code " java . util . Map $ Entry " }
* @ return the { @ link Type } , or null if it cannot be found */
@ Nullable public Type getTypeFromString ( String typeStr ) { } }
|
try { return typeCache . get ( typeStr ) . orNull ( ) ; } catch ( ExecutionException e ) { return null ; }
|
public class GammaDistribution { /** * Approximate probit for chi squared distribution
* Based on first half of algorithm AS 91
* Reference :
* D . J . Best , D . E . Roberts < br >
* Algorithm AS 91 : The percentage points of the χ2 distribution < br >
* Journal of the Royal Statistical Society . Series C ( Applied Statistics )
* @ param p Probit value
* @ param nu Shape parameter for Chi , nu = 2 * k
* @ param g log ( nu )
* @ return Probit for chi squared */
@ Reference ( authors = "D. J. Best, D. E. Roberts" , title = "Algorithm AS 91: The percentage points of the χ² distribution" , booktitle = "Journal of the Royal Statistical Society. Series C (Applied Statistics)" , url = "https://doi.org/10.2307/2347113" , bibkey = "doi:10.2307/2347113" ) protected static double chisquaredProbitApproximation ( final double p , double nu , double g ) { } }
|
final double EPS1 = 1e-14 ; // Approximation quality
// Sanity checks
if ( Double . isNaN ( p ) || Double . isNaN ( nu ) ) { return Double . NaN ; } // Range check
if ( p <= 0 ) { return 0 ; } if ( p >= 1 ) { return Double . POSITIVE_INFINITY ; } // Invalid parameters
if ( nu <= 0 ) { return Double . NaN ; } // Shape of gamma distribution , " XX " in AS 91
final double k = 0.5 * nu ; // For small chi squared values - AS 91
final double logp = FastMath . log ( p ) ; if ( nu < - 1.24 * logp ) { // FIXME : implement and use logGammap1 instead - more stable ?
// final double lgam1pa = ( alpha < 0.5 ) ? logGammap1 ( alpha ) :
// ( FastMath . log ( alpha ) + g ) ;
// return FastMath . exp ( ( lgam1pa + logp ) / alpha + MathUtil . LOG2 ) ;
// This is literal AS 91 , above is the GNU R variant .
return FastMath . pow ( p * k * FastMath . exp ( g + k * MathUtil . LOG2 ) , 1. / k ) ; } else if ( nu > 0.32 ) { // Wilson and Hilferty estimate : - AS 91 at 3
final double x = NormalDistribution . quantile ( p , 0 , 1 ) ; final double p1 = 2. / ( 9. * nu ) ; final double a = x * FastMath . sqrt ( p1 ) + 1 - p1 ; double ch = nu * a * a * a ; // Better approximation for p tending to 1:
if ( ch > 2.2 * nu + 6 ) { ch = - 2 * ( FastMath . log1p ( - p ) - ( k - 1 ) * FastMath . log ( 0.5 * ch ) + g ) ; } return ch ; } else { // nu < = 0.32 , AS 91 at 1
final double C7 = 4.67 , C8 = 6.66 , C9 = 6.73 , C10 = 13.32 ; final double ag = FastMath . log1p ( - p ) + g + ( k - 1 ) * MathUtil . LOG2 ; double ch = 0.4 ; while ( true ) { final double p1 = 1 + ch * ( C7 + ch ) ; final double p2 = ch * ( C9 + ch * ( C8 + ch ) ) ; final double t = - 0.5 + ( C7 + 2 * ch ) / p1 - ( C9 + ch * ( C10 + 3 * ch ) ) / p2 ; final double delta = ( 1 - FastMath . exp ( ag + 0.5 * ch ) * p2 / p1 ) / t ; ch -= delta ; if ( Math . abs ( delta ) <= EPS1 * Math . abs ( ch ) ) { return ch ; } } }
|
public class OkHttpSimpleTextRequest { /** * will be invoked in remote service */
@ Override public String loadDataFromNetwork ( ) throws Exception { } }
|
try { Ln . d ( "Call web service " + url ) ; OkUrlFactory urlFactory = new OkUrlFactory ( getOkHttpClient ( ) ) ; HttpURLConnection connection = urlFactory . open ( new URL ( url ) ) ; return IOUtils . toString ( connection . getInputStream ( ) ) ; } catch ( final MalformedURLException e ) { Ln . e ( e , "Unable to create URL" ) ; throw e ; } catch ( final IOException e ) { Ln . e ( e , "Unable to download content" ) ; throw e ; }
|
public class Http2Stream { /** * Accept headers from the network and store them until the client calls { @ link # takeHeaders } , or
* { @ link FramingSource # read } them . */
void receiveHeaders ( Headers headers , boolean inFinished ) { } }
|
assert ( ! Thread . holdsLock ( Http2Stream . this ) ) ; boolean open ; synchronized ( this ) { if ( ! hasResponseHeaders || ! inFinished ) { hasResponseHeaders = true ; headersQueue . add ( headers ) ; } else { this . source . trailers = headers ; } if ( inFinished ) { this . source . finished = true ; } open = isOpen ( ) ; notifyAll ( ) ; } if ( ! open ) { connection . removeStream ( id ) ; }
|
public class Message { /** * Sets the message format arguments .
* @ param args the message arguments . */
public void setArgs ( final Serializable ... args ) { } }
|
this . args = args == null || args . length == 0 ? null : args ;
|
public class StorageProviderFactoryImpl { /** * Removes a particular storage provider from the cache , which will
* require that the connection be recreated on the next call .
* @ param storageAccountId - the ID of the storage provider account */
@ Override public void expireStorageProvider ( String storageAccountId ) { } }
|
storageAccountId = checkStorageAccountId ( storageAccountId ) ; log . info ( "Expiring storage provider connection! Storage account id: {}" , storageAccountId ) ; storageProviders . remove ( storageAccountId ) ;
|
public class Journal { /** * Format the local storage with the given namespace . */
private void formatImage ( NamespaceInfo nsInfo ) throws IOException { } }
|
Preconditions . checkState ( nsInfo . getNamespaceID ( ) != 0 , "can't format with uninitialized namespace info: %s" , nsInfo . toColonSeparatedString ( ) ) ; LOG . info ( "Formatting image " + this . getJournalId ( ) + " with namespace info: (" + nsInfo . toColonSeparatedString ( ) + ")" ) ; imageStorage . backupDirs ( ) ; imageStorage . format ( nsInfo ) ; // clear obsolete image digests
checkpointImageDigests . clear ( ) ;
|
public class JsonReader { /** * Read the Json from the passed File .
* @ param aFile
* The file containing the Json to be parsed . May not be
* < code > null < / code > .
* @ param aFallbackCharset
* The charset to be used in case no is BOM is present . May not be
* < code > null < / code > .
* @ return < code > null < / code > if reading failed , the Json declarations
* otherwise . */
@ Nullable public static IJson readFromFile ( @ Nonnull final File aFile , @ Nonnull final Charset aFallbackCharset ) { } }
|
return readFromFile ( aFile , aFallbackCharset , null ) ;
|
public class TreeHashGenerator { /** * Calculates a hex encoded binary hash using a tree hashing algorithm for
* the data in the specified input stream . The method will consume all the
* inputStream and close it when returned .
* @ param input
* The input stream containing the data to hash .
* @ return The hex encoded binary tree hash for the data in the specified
* input stream .
* @ throws AmazonClientException
* If problems were encountered reading the data or calculating
* the hash . */
public static String calculateTreeHash ( InputStream input ) throws AmazonClientException { } }
|
try { TreeHashInputStream treeHashInputStream = new TreeHashInputStream ( input ) ; byte [ ] buffer = new byte [ 1024 ] ; while ( treeHashInputStream . read ( buffer , 0 , buffer . length ) != - 1 ) ; // closing is currently required to compute the checksum
treeHashInputStream . close ( ) ; return calculateTreeHash ( treeHashInputStream . getChecksums ( ) ) ; } catch ( Exception e ) { throw new AmazonClientException ( "Unable to compute hash" , e ) ; }
|
public class TrustedCertificates { /** * Loads X509 certificates and signing policy files from specified
* locations . The locations can be either files or
* directories . The directories will be automatically traversed
* and all files in the form of < i > hashcode . number < / i > and will be
* loaded automatically as trusted certificates . An attempt will
* be made to load signing policy for the CA associated with
* that hashcode from & lt ; hashcode & gt ; . signing _ policy . If policy file is
* not found , no error will be thrown , only path validation code
* enforces the signing policy requirement .
* @ param locations a list of certificate files / directories to load
* the certificates from . The locations are comma
* separated .
* @ return < code > java . security . cert . X509Certificate < / code > an array
* of loaded certificates */
public static X509Certificate [ ] loadCertificates ( String locations ) { } }
|
TrustedCertificates tc = TrustedCertificates . load ( locations ) ; return ( tc == null ) ? null : tc . getCertificates ( ) ;
|
public class AbstractCluster { /** * 通知状态变成不可用 , 主要是 : < br >
* 1 . 注册中心删除 , 更新节点后变成不可用时 < br >
* 2 . 连接断线后 ( 心跳 + 调用 ) , 如果是可用节点为空 */
public void notifyStateChangeToUnavailable ( ) { } }
|
final List < ConsumerStateListener > onprepear = consumerConfig . getOnAvailable ( ) ; if ( onprepear != null ) { AsyncRuntime . getAsyncThreadPool ( ) . execute ( new Runnable ( ) { @ Override public void run ( ) { // 状态变化通知监听器
for ( ConsumerStateListener listener : onprepear ) { try { listener . onUnavailable ( consumerBootstrap . getProxyIns ( ) ) ; } catch ( Exception e ) { LOGGER . error ( "Failed to notify consumer state listener when state change to unavailable" ) ; } } } } ) ; }
|
public class EnvironmentsInner { /** * Create or replace an existing Environment .
* @ param resourceGroupName The name of the resource group .
* @ param labAccountName The name of the lab Account .
* @ param labName The name of the lab .
* @ param environmentSettingName The name of the environment Setting .
* @ param environmentName The name of the environment .
* @ param environment Represents an environment instance
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < EnvironmentInner > createOrUpdateAsync ( String resourceGroupName , String labAccountName , String labName , String environmentSettingName , String environmentName , EnvironmentInner environment , final ServiceCallback < EnvironmentInner > serviceCallback ) { } }
|
return ServiceFuture . fromResponse ( createOrUpdateWithServiceResponseAsync ( resourceGroupName , labAccountName , labName , environmentSettingName , environmentName , environment ) , serviceCallback ) ;
|
public class XMLResourceBundle { /** * Gets the value from the { @ link ResourceBundle } for the supplied message key and additional details .
* @ param aMessage A message key
* @ param aDetailsArray Additional details for the message
* @ return The value of the bundle message */
public String get ( final String aMessage , final Object ... aDetailsArray ) { } }
|
final String [ ] details = new String [ aDetailsArray . length ] ; for ( int index = 0 ; index < details . length ; index ++ ) { details [ index ] = aDetailsArray [ index ] . toString ( ) ; } LOGGER . debug ( MessageCodes . UTIL_026 , aMessage , details ) ; return StringUtils . format ( super . getString ( aMessage ) , details ) ;
|
public class FNORGImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public Object eGet ( int featureID , boolean resolve , boolean coreType ) { } }
|
switch ( featureID ) { case AfplibPackage . FNORG__RESERVED : return getReserved ( ) ; case AfplibPackage . FNORG__CHAR_ROT : return getCharRot ( ) ; case AfplibPackage . FNORG__MAX_BOSET : return getMaxBOset ( ) ; case AfplibPackage . FNORG__MAX_CHAR_INC : return getMaxCharInc ( ) ; case AfplibPackage . FNORG__SP_CHAR_INC : return getSpCharInc ( ) ; case AfplibPackage . FNORG__MAX_BEXT : return getMaxBExt ( ) ; case AfplibPackage . FNORG__ORNT_FLGS : return getOrntFlgs ( ) ; case AfplibPackage . FNORG__RESERVED2 : return getReserved2 ( ) ; case AfplibPackage . FNORG__EM_SP_INC : return getEmSpInc ( ) ; case AfplibPackage . FNORG__RESERVED3 : return getReserved3 ( ) ; case AfplibPackage . FNORG__FIG_SP_INC : return getFigSpInc ( ) ; case AfplibPackage . FNORG__NOM_CHAR_INC : return getNomCharInc ( ) ; case AfplibPackage . FNORG__DEF_BINC : return getDefBInc ( ) ; case AfplibPackage . FNORG__MIN_ASP : return getMinASp ( ) ; } return super . eGet ( featureID , resolve , coreType ) ;
|
public class ClassGenerator { /** * See if any of the parsed types in the given list needs warning suppression . */
boolean needsSuppressWarnings ( List < MessageType > msgTypes ) { } }
|
return msgTypes . stream ( ) . anyMatch ( t -> t . accept ( suppressWarningsVisitor , null ) ) ;
|
public class SimpleBloomFilter {
    /**
     * Computes the number of bits the Bloom Filter requires, given the approximate
     * number of elements to be stored and the acceptable false positive rate:
     *
     *   m = n * (log p) / (log 2)^2
     *
     * where m is the required number of bits, n the approximate number of elements,
     * and p the acceptable false positive rate between 0.0 and 1.0 exclusive.
     *
     * @param approximateNumberOfElements the estimated number of elements (n) the
     *        user expects to add to the Bloom Filter
     * @param acceptableFalsePositiveRate the acceptable false positive probability (p)
     * @return the required number of bits, rounded up to the next whole bit
     */
    protected static int computeRequiredNumberOfBits(double approximateNumberOfElements,
            double acceptableFalsePositiveRate) {
        final double numerator = approximateNumberOfElements * Math.log(acceptableFalsePositiveRate);
        final double denominator = Math.pow(Math.log(2.0d), 2.0d);
        // log(p) is negative for p in (0, 1); abs yields the positive bit count.
        final double numberOfBits = Math.abs(numerator / denominator);
        return (int) Math.ceil(numberOfBits);
    }
}
|
public class ManagedChannelBuilder { /** * Creates a channel with the target ' s address and port number .
* @ see # forTarget ( String )
* @ since 1.0.0 */
public static ManagedChannelBuilder < ? > forAddress ( String name , int port ) { } }
|
return ManagedChannelProvider . provider ( ) . builderForAddress ( name , port ) ;
|
public class SwaggerBuilder { /** * Returns the Tag for a controller .
* @ param controllerClass
* @ return a controller tag or null */
protected Tag getControllerTag ( Class < ? extends Controller > controllerClass ) { } }
|
if ( controllerClass . isAnnotationPresent ( ApiOperations . class ) ) { ApiOperations annotation = controllerClass . getAnnotation ( ApiOperations . class ) ; io . swagger . models . Tag tag = new io . swagger . models . Tag ( ) ; tag . setName ( Optional . fromNullable ( Strings . emptyToNull ( annotation . tag ( ) ) ) . or ( controllerClass . getSimpleName ( ) ) ) ; tag . setDescription ( translate ( annotation . descriptionKey ( ) , annotation . description ( ) ) ) ; if ( ! Strings . isNullOrEmpty ( annotation . externalDocs ( ) ) ) { ExternalDocs docs = new ExternalDocs ( ) ; docs . setUrl ( annotation . externalDocs ( ) ) ; tag . setExternalDocs ( docs ) ; } if ( ! Strings . isNullOrEmpty ( tag . getDescription ( ) ) ) { return tag ; } } return null ;
|
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - >
 * < ! - - end - user - doc - - >
 * Lazily resolves and caches the MDR EClass from the registered AFP lib EPackage .
 * @ generated */
public EClass getMDR ( ) { } }
|
// Generated lazy accessor: classifier index 294 is fixed by the EMF code
// generator for this package -- do not edit by hand.
if ( mdrEClass == null ) { mdrEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 294 ) ; } return mdrEClass ;
|
public class ExifReader { /** * Reads TIFF formatted Exif data at a specified offset within a { @ link RandomAccessReader } . */
public void extract ( @ NotNull final RandomAccessReader reader , @ NotNull final Metadata metadata , int readerOffset , @ Nullable Directory parentDirectory ) { } }
|
ExifTiffHandler exifTiffHandler = new ExifTiffHandler ( metadata , parentDirectory ) ; try { // Read the TIFF - formatted Exif data
new TiffReader ( ) . processTiff ( reader , exifTiffHandler , readerOffset ) ; } catch ( TiffProcessingException e ) { exifTiffHandler . error ( "Exception processing TIFF data: " + e . getMessage ( ) ) ; // TODO what do to with this error state ?
e . printStackTrace ( System . err ) ; } catch ( IOException e ) { exifTiffHandler . error ( "Exception processing TIFF data: " + e . getMessage ( ) ) ; // TODO what do to with this error state ?
e . printStackTrace ( System . err ) ; }
|
public class Sequential { /** * Checks to see if the subscriber is a predicated subscriber , and if it
* applies .
* @ param s
* - The subscriber to check .
* @ param message
* - The message to check .
* @ return If the subscriber is not predicated or it is and applies , then it
* returns true . If it ' s a predicated subscriber , and it doesn ' t
* apply , then it returns false . */
private boolean predicateApplies ( Subscriber < ? > s , Object message ) { } }
|
if ( s instanceof PredicatedSubscriber && ! ( ( PredicatedSubscriber < ? > ) s ) . appliesO ( message ) ) { return false ; } return true ;
|
public class cufftHandle { /** * Set the size of this plan
* @ param x Size in x
* @ param y Size in y
* @ param z Size in z */
void setSize ( int x , int y , int z ) { } }
|
this . sizeX = x ; this . sizeY = y ; this . sizeZ = z ;
|
public class ObjectAccumulator { /** * Increases the count of object o by inc and returns the new count value
* @ param o
* @ param inc
* @ return */
public double incBy ( K o , double inc ) { } }
|
Counter c = countMap . get ( o ) ; if ( c == null ) { c = new Counter ( ) ; countMap . put ( o , c ) ; } c . count += inc ; return c . count ;
|
public class NodeTypeSchemata { /** * Determine if the session overrides any namespace mappings used by this schemata .
 * @ param session the session ; may not be null
 * @ return true if the session overrides one or more namespace mappings used in this schemata , or false otherwise */
private boolean overridesNamespaceMappings ( JcrSession session ) { } }
|
// Fast path: a LocalNamespaceRegistry exposes the session-local overrides directly.
NamespaceRegistry registry = session . context ( ) . getNamespaceRegistry ( ) ; if ( registry instanceof LocalNamespaceRegistry ) { Set < Namespace > localNamespaces = ( ( LocalNamespaceRegistry ) registry ) . getLocalNamespaces ( ) ; if ( localNamespaces . isEmpty ( ) ) { // There are no local mappings . . .
return false ; } for ( Namespace namespace : localNamespaces ) { if ( prefixesByUris . containsKey ( namespace . getNamespaceUri ( ) ) ) return true ; } // None of the local namespace mappings overrode any namespaces used by this schemata . . .
return false ; } // We can ' t find the local mappings , so brute - force it . . .
// Slow path: compare every registered prefix against the one this schemata expects.
for ( Namespace namespace : registry . getNamespaces ( ) ) { String expectedPrefix = prefixesByUris . get ( namespace . getNamespaceUri ( ) ) ; if ( expectedPrefix == null ) { // This namespace is not used by this schemata . . .
continue ; } if ( ! namespace . getPrefix ( ) . equals ( expectedPrefix ) ) return true ; } return false ;
|
public class OWLSubDataPropertyOfAxiomImpl_CustomFieldSerializer { /** * Deserializes the content of the object from the
 * { @ link com . google . gwt . user . client . rpc . SerializationStreamReader } .
 * @ param streamReader the { @ link com . google . gwt . user . client . rpc . SerializationStreamReader } to read the
 * object ' s content from
 * @ param instance the object instance to deserialize
 * @ throws com . google . gwt . user . client . rpc . SerializationException
 * if the deserialization operation is not
 * successful */
@ Override public void deserializeInstance ( SerializationStreamReader streamReader , OWLSubDataPropertyOfAxiomImpl instance ) throws SerializationException { } }
|
// Delegates to the static deserialize helper defined for this GWT custom field serializer.
deserialize ( streamReader , instance ) ;
|
public class PdfStamper { /** * Sets the encryption options for this document . The userPassword and the
* ownerPassword can be null or have zero length . In this case the ownerPassword
* is replaced by a random string . The open permissions for the document can be
* AllowPrinting , AllowModifyContents , AllowCopy , AllowModifyAnnotations ,
* AllowFillIn , AllowScreenReaders , AllowAssembly and AllowDegradedPrinting .
* The permissions can be combined by ORing them .
* @ param userPassword the user password . Can be null or empty
* @ param ownerPassword the owner password . Can be null or empty
* @ param permissions the user permissions
* @ param encryptionType the type of encryption . It can be one of STANDARD _ ENCRYPTION _ 40 , STANDARD _ ENCRYPTION _ 128 or ENCRYPTION _ AES128.
* Optionally DO _ NOT _ ENCRYPT _ METADATA can be ored to output the metadata in cleartext
* @ throws DocumentException if the document is already open */
public void setEncryption ( byte userPassword [ ] , byte ownerPassword [ ] , int permissions , int encryptionType ) throws DocumentException { } }
|
if ( stamper . isAppend ( ) ) throw new DocumentException ( "Append mode does not support changing the encryption status." ) ; if ( stamper . isContentWritten ( ) ) throw new DocumentException ( "Content was already written to the output." ) ; stamper . setEncryption ( userPassword , ownerPassword , permissions , encryptionType ) ;
|
public class HttpMethodBase { /** * Sets the query string of this HTTP method . The pairs are encoded as UTF - 8 characters .
* To use a different charset the parameters can be encoded manually using EncodingUtil
* and set as a single String .
* @ param params an array of { @ link NameValuePair } s to add as query string
* parameters . The name / value pairs will be automcatically
* URL encoded
* @ see EncodingUtil # formUrlEncode ( NameValuePair [ ] , String )
* @ see # setQueryString ( String ) */
@ Override public void setQueryString ( NameValuePair [ ] params ) { } }
|
LOG . trace ( "enter HttpMethodBase.setQueryString(NameValuePair[])" ) ; queryString = EncodingUtil . formUrlEncode ( params , "UTF-8" ) ;
|
public class ReturnUrl { /** * Get Resource Url for GetAvailablePaymentActionsForReturn
* @ param paymentId Unique identifier of the payment for which to perform the action .
* @ param returnId Unique identifier of the return whose items you want to get .
* @ return String Resource Url */
public static MozuUrl getAvailablePaymentActionsForReturnUrl ( String paymentId , String returnId ) { } }
|
UrlFormatter formatter = new UrlFormatter ( "/api/commerce/returns/{returnId}/payments/{paymentId}/actions" ) ; formatter . formatUrl ( "paymentId" , paymentId ) ; formatter . formatUrl ( "returnId" , returnId ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
|
public class DatabaseAccountsInner { /** * Online the specified region for the specified Azure Cosmos DB database account .
 * @ param resourceGroupName Name of an Azure resource group .
 * @ param accountName Cosmos DB database account name .
 * @ param region Cosmos DB region , with spaces between words and each word capitalized .
 * @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
 * @ throws IllegalArgumentException thrown if parameters fail the validation
 * @ return the { @ link ServiceFuture } object */
public ServiceFuture < Void > onlineRegionAsync ( String resourceGroupName , String accountName , String region , final ServiceCallback < Void > serviceCallback ) { } }
|
// Adapts the observable-based overload to a ServiceFuture wired to the given callback.
return ServiceFuture . fromResponse ( onlineRegionWithServiceResponseAsync ( resourceGroupName , accountName , region ) , serviceCallback ) ;
|
public class JvmTypesBuilder { /** * / * @ Nullable */
public JvmGenericType toClass ( /* @ Nullable */
EObject sourceElement , /* @ Nullable */
String name ) { } }
|
// Convenience overload: delegates to the three-argument variant with a null third
// argument (presumably an optional initializer -- confirm against the overload).
return toClass ( sourceElement , name , null ) ;
|
public class Searches { /** * Searches the only matching element returning just element if found ,
* nothing otherwise .
* @ param < E > the element type parameter
* @ param array the array to be searched
* @ param predicate the predicate to be applied to each element
* @ throws IllegalStateException if more than one element is found
* @ return just the element found or nothing */
public static < E > Optional < E > searchOne ( E [ ] array , Predicate < E > predicate ) { } }
|
final Iterator < E > filtered = new FilteringIterator < E > ( new ArrayIterator < E > ( array ) , predicate ) ; return new MaybeOneElement < E > ( ) . apply ( filtered ) ;
|
public class RedisBase { /**
 * Deletes the given key . A key that does not exist is ignored .
 * NOTE(review): the original javadoc documented a keyBytes parameter , but this
 * method takes none -- it reads the instance ' s key / keyBytes fields ; confirm intent .
 * @ return true if the key existed and was deleted ;
 * false if the key did not exist */
public boolean remove ( ) { } }
|
// Text keys use the plain Jedis commands; binary keys pick cluster or single-node
// binary commands depending on the group's topology.
try { if ( ! isBinary ) return getJedisCommands ( groupName ) . del ( key ) == 1 ; if ( isCluster ( groupName ) ) { return getBinaryJedisClusterCommands ( groupName ) . del ( keyBytes ) == 1 ; } return getBinaryJedisCommands ( groupName ) . del ( keyBytes ) == 1 ; } finally { // Always return the connection to the provider pool.
getJedisProvider ( groupName ) . release ( ) ; }
|
public class ObjectBindTransform { /** * Generates the statement that parses this property from XML via its bind mapper
 * and assigns the result through the bean ' s setter .
 * / * ( non - Javadoc )
 * @ see com . abubusoft . kripton . processor . bind . transform . BindTransform # generateParseOnXml ( com . abubusoft . kripton . processor . bind . BindTypeContext , com . squareup . javapoet . MethodSpec . Builder , java . lang . String , com . squareup . javapoet . TypeName , java . lang . String , com . abubusoft . kripton . processor . bind . model . BindProperty ) */
@ Override public void generateParseOnXml ( BindTypeContext context , MethodSpec . Builder methodBuilder , String parserName , TypeName beanClass , String beanName , BindProperty property ) { } }
|
// TODO QUA
// TypeName typeName = resolveTypeName ( property . getParent ( ) ,
// property . getPropertyType ( ) . getTypeName ( ) ) ;
// Resolve the mapper registered for the property's declared type, then emit the
// "setter(bean, mapper.parseOnXml(...))" statement into the generated method.
TypeName typeName = property . getPropertyType ( ) . getTypeName ( ) ; String bindName = context . getBindMapperName ( context , typeName ) ; methodBuilder . addStatement ( setter ( beanClass , beanName , property , "$L.parseOnXml(xmlParser, eventType)" ) , bindName ) ;
|
public class IfcPersonImpl { /** * < ! - - begin - user - doc - - >
 * < ! - - end - user - doc - - >
 * Generated EMF accessor : reflectively reads the roles reference list , resolving proxies .
 * @ generated */
@ SuppressWarnings ( "unchecked" ) public EList < IfcActorRole > getRoles ( ) { } }
|
// eGet with resolve=true returns the (proxy-resolved) list for the ROLES feature.
return ( EList < IfcActorRole > ) eGet ( Ifc2x3tc1Package . Literals . IFC_PERSON__ROLES , true ) ;
|
public class Labeling { /** * labeling observation sequences .
* @ param file the file
* @ return a list of sentences with tags annotated */
@ SuppressWarnings ( "unchecked" ) public List seqLabeling ( File file ) { } }
|
List < Sentence > obsvSeqs = dataReader . readFile ( file . getPath ( ) ) ; return labeling ( obsvSeqs ) ;
|
public class AzureBatchHelper { /** * Adds a single task to a job on Azure Batch .
 * @ param jobId the ID of the job .
 * @ param taskId the ID of the task .
 * @ param jobJarUri the publicly accessible uri list to the job jar directory .
 * @ param confUri the publicly accessible uri list to the job configuration directory .
 * @ param command the commandline argument to execute the job .
 * @ throws IOException */
public void submitTask ( final String jobId , final String taskId , final URI jobJarUri , final URI confUri , final String command ) throws IOException { } }
|
// Stage the job jar and the evaluator-shim configuration as Batch resource files,
// then build and submit the task; when containers are enabled the task runs with
// an elevated auto-user identity.
final List < ResourceFile > resources = new ArrayList < > ( ) ; final ResourceFile jarSourceFile = new ResourceFile ( ) . withBlobSource ( jobJarUri . toString ( ) ) . withFilePath ( AzureBatchFileNames . getTaskJarFileName ( ) ) ; resources . add ( jarSourceFile ) ; final ResourceFile confSourceFile = new ResourceFile ( ) . withBlobSource ( confUri . toString ( ) ) . withFilePath ( this . azureBatchFileNames . getEvaluatorShimConfigurationPath ( ) ) ; resources . add ( confSourceFile ) ; LOG . log ( Level . INFO , "Evaluator task command: {0}" , command ) ; TaskAddParameter taskAddParameter = new TaskAddParameter ( ) . withId ( taskId ) . withResourceFiles ( resources ) . withContainerSettings ( createTaskContainerSettings ( taskId ) ) . withCommandLine ( command ) ; if ( this . areContainersEnabled ) { taskAddParameter = taskAddParameter . withUserIdentity ( new UserIdentity ( ) . withAutoUser ( new AutoUserSpecification ( ) . withElevationLevel ( ElevationLevel . ADMIN ) ) ) ; } this . client . taskOperations ( ) . createTask ( jobId , taskAddParameter ) ;
|
public class ResourceUtil { /** * check if moveing a file is ok with the rules for the Resource interface , to not change this
* rules .
* @ param source
* @ param target
* @ throws IOException */
public static void checkMoveToOK ( Resource source , Resource target ) throws IOException { } }
|
if ( ! source . exists ( ) ) { throw new IOException ( "can't move [" + source . getPath ( ) + "] to [" + target . getPath ( ) + "], source file does not exist" ) ; } if ( source . isDirectory ( ) && target . isFile ( ) ) throw new IOException ( "can't move [" + source . getPath ( ) + "] directory to [" + target . getPath ( ) + "], target is a file" ) ; if ( source . isFile ( ) && target . isDirectory ( ) ) throw new IOException ( "can't move [" + source . getPath ( ) + "] file to [" + target . getPath ( ) + "], target is a directory" ) ;
|
public class ListUtil { /** * Helper function for { @ link # removeRef } , etc . */
protected static Object remove ( EqualityComparator eqc , Object [ ] list , Object element ) { } }
|
// Locate the element using the supplied equality semantics, then remove by index.
return remove ( list , indexOf ( eqc , list , element ) ) ;
|
public class Syslog { /** * destroyInstance ( ) gracefully shuts down the specified Syslog protocol and
 * removes the instance from Syslog4j .
 * @ param protocol - the Syslog protocol to destroy ; blank values are ignored
 * @ throws SyslogRuntimeException if no instance is registered for the protocol */
public synchronized static void destroyInstance ( String protocol ) throws SyslogRuntimeException { } }
|
// Sleeps one loop interval before shutdown so in-flight messages can drain
// (best-effort), then removes the instance even if shutdown throws.
if ( StringUtils . isBlank ( protocol ) ) { return ; } String _protocol = protocol . toLowerCase ( ) ; if ( instances . containsKey ( _protocol ) ) { SyslogUtility . sleep ( SyslogConstants . THREAD_LOOP_INTERVAL_DEFAULT ) ; SyslogIF syslog = instances . get ( _protocol ) ; try { syslog . shutdown ( ) ; } finally { instances . remove ( _protocol ) ; } } else { throw new SyslogRuntimeException ( "Cannot destroy protocol \"%s\" instance; call shutdown instead" , protocol ) ; }
|
public class GovernmentChecker { /** * Looks for a noun in the sentence ' s objects .
* @ param sentence
* entered by the user
* @ return a < tt > List > < / tt > of every noun found in the sentence ' s objects and
* its location in the sentence */
public List < Token > findNouns ( Sentence sentence ) { } }
|
List < Token > nouns = new ArrayList < Token > ( ) ; List < SyntacticChunk > syntChunks = sentence . getSyntacticChunks ( ) ; for ( int i = 0 ; i < syntChunks . size ( ) ; i ++ ) { String tag = syntChunks . get ( i ) . getTag ( ) ; if ( tag . equals ( "PIV" ) || tag . equals ( "ACC" ) || tag . equals ( "SC" ) ) { for ( Token token : syntChunks . get ( i ) . getTokens ( ) ) { if ( token . getPOSTag ( ) . equals ( "n" ) || token . getPOSTag ( ) . equals ( "pron-pers" ) || token . getPOSTag ( ) . equals ( "prop" ) ) { nouns . add ( token ) ; } } } } return nouns ;
|
public class TimeZone { /** * internal version ( which is called by public APIs ) accepts
 * SHORT , LONG , SHORT _ GENERIC , LONG _ GENERIC , SHORT _ GMT , LONG _ GMT ,
 * SHORT _ COMMONLY _ USED and GENERIC _ LOCATION .
 * Dispatches to one of three formatting families : generic names , raw GMT offsets ,
 * or specific ( standard / daylight ) names , with localized - GMT fallbacks . */
private String _getDisplayName ( int style , boolean daylight , ULocale locale ) { } }
|
if ( locale == null ) { throw new NullPointerException ( "locale is null" ) ; } String result = null ; if ( style == GENERIC_LOCATION || style == LONG_GENERIC || style == SHORT_GENERIC ) { // Generic format
TimeZoneFormat tzfmt = TimeZoneFormat . getInstance ( locale ) ; long date = System . currentTimeMillis ( ) ; Output < TimeType > timeType = new Output < TimeType > ( TimeType . UNKNOWN ) ; switch ( style ) { case GENERIC_LOCATION : result = tzfmt . format ( Style . GENERIC_LOCATION , this , date , timeType ) ; break ; case LONG_GENERIC : result = tzfmt . format ( Style . GENERIC_LONG , this , date , timeType ) ; break ; case SHORT_GENERIC : result = tzfmt . format ( Style . GENERIC_SHORT , this , date , timeType ) ; break ; } // Generic format may use Localized GMT as the final fallback .
// When Localized GMT format is used , the result might not be
// appropriate for the requested daylight value .
if ( daylight && timeType . value == TimeType . STANDARD || ! daylight && timeType . value == TimeType . DAYLIGHT ) { int offset = daylight ? getRawOffset ( ) + getDSTSavings ( ) : getRawOffset ( ) ; result = ( style == SHORT_GENERIC ) ? tzfmt . formatOffsetShortLocalizedGMT ( offset ) : tzfmt . formatOffsetLocalizedGMT ( offset ) ; } } else if ( style == LONG_GMT || style == SHORT_GMT ) { // Offset format
TimeZoneFormat tzfmt = TimeZoneFormat . getInstance ( locale ) ; int offset = daylight && useDaylightTime ( ) ? getRawOffset ( ) + getDSTSavings ( ) : getRawOffset ( ) ; switch ( style ) { case LONG_GMT : result = tzfmt . formatOffsetLocalizedGMT ( offset ) ; break ; case SHORT_GMT : result = tzfmt . formatOffsetISO8601Basic ( offset , false , false , false ) ; break ; } } else { // Specific format
assert ( style == LONG || style == SHORT || style == SHORT_COMMONLY_USED ) ; // Gets the name directly from TimeZoneNames
long date = System . currentTimeMillis ( ) ; TimeZoneNames tznames = TimeZoneNames . getInstance ( locale ) ; NameType nameType = null ; switch ( style ) { case LONG : nameType = daylight ? NameType . LONG_DAYLIGHT : NameType . LONG_STANDARD ; break ; case SHORT : case SHORT_COMMONLY_USED : nameType = daylight ? NameType . SHORT_DAYLIGHT : NameType . SHORT_STANDARD ; break ; } result = tznames . getDisplayName ( ZoneMeta . getCanonicalCLDRID ( this ) , nameType , date ) ; if ( result == null ) { // Fallback to localized GMT
TimeZoneFormat tzfmt = TimeZoneFormat . getInstance ( locale ) ; int offset = daylight && useDaylightTime ( ) ? getRawOffset ( ) + getDSTSavings ( ) : getRawOffset ( ) ; result = ( style == LONG ) ? tzfmt . formatOffsetLocalizedGMT ( offset ) : tzfmt . formatOffsetShortLocalizedGMT ( offset ) ; } } assert ( result != null ) ; return result ;
|
public class PropertyController { /** * Gets the value for a given property for an entity . If the property is not set , a non - null
 * { @ link net . nemerosa . ontrack . model . structure . Property } is returned but is marked as
 * { @ linkplain net . nemerosa . ontrack . model . structure . Property # isEmpty ( ) empty } .
 * If the property is not opened for viewing , the call could be rejected with an
 * authorization exception .
 * @ param entityType Type of the entity to get the edition form for
 * @ param id ID of the entity to get the edition form for
 * @ param propertyTypeName Fully qualified name of the property to get the form for
 * @ return A response that defines the property */
@ RequestMapping ( value = "{entityType}/{id}/{propertyTypeName}/view" , method = RequestMethod . GET ) public Resource < Property < ? > > getPropertyValue ( @ PathVariable ProjectEntityType entityType , @ PathVariable ID id , @ PathVariable String propertyTypeName ) { } }
|
// Resolves the entity, reads the (possibly empty) property, and wraps it with a
// self link back to this endpoint.
return Resource . of ( propertyService . getProperty ( getEntity ( entityType , id ) , propertyTypeName ) , uri ( on ( getClass ( ) ) . getPropertyValue ( entityType , id , propertyTypeName ) ) ) ;
|
public class XMLSerializer { /** * Gets the property .
* @ param name the name
* @ return obj
* @ throws IllegalArgumentException the illegal argument exception */
public Object getProperty ( String name ) throws IllegalArgumentException { } }
|
if ( name == null ) { throw new IllegalArgumentException ( "property name can not be null" ) ; } if ( PROPERTY_SERIALIZER_INDENTATION . equals ( name ) ) { return indentationString ; } else if ( PROPERTY_SERIALIZER_LINE_SEPARATOR . equals ( name ) ) { return lineSeparator ; } else if ( PROPERTY_LOCATION . equals ( name ) ) { return location ; } else { return null ; }
|
public class ClassScanner { /** * Find all implementations of an interface ( if an interface is provided ) or extensions ( if a class is provided )
 * @ param clazz
 * @ param < T >
 * @ return */
public < T > List < Class < ? extends T > > getImplementations ( final Class < T > clazz ) { } }
|
// Delegates to the two-argument overload with a null second argument
// (presumably "no additional filter" -- confirm against the overload).
return getImplementations ( clazz , null ) ;
|
public class FileUtils {
    /**
     * Returns true if the given file is a writable regular file.
     * (The previous javadoc incorrectly described a readability check and a path
     * parameter; this method checks writability of a File.)
     *
     * @param file the file to check; may be null, in which case false is returned
     * @return true if the file is non-null, writable and not a directory
     */
    public static boolean canWriteFile(File file) {
        // Direct boolean expression replaces the redundant if/else returning true/false.
        // canWrite() is false for non-existent files, so no separate exists() check is needed.
        return file != null && file.canWrite() && !file.isDirectory();
    }
}
|
public class BaseAdsServiceClientFactoryHelper { /** * Creates an { @ link AdsServiceDescriptor } for a specified service .
 * @ param interfaceClass the ads service that we want a descriptor for
 * @ param version the version of the service
 * @ return a descriptor of the requested service */
@ Override public D createServiceDescriptor ( Class < ? > interfaceClass , String version ) { } }
|
// Delegates descriptor creation to the injected factory.
return adsServiceDescriptorFactory . create ( interfaceClass , version ) ;
|
public class appfwprofile_cookieconsistency_binding { /** * Use this API to fetch appfwprofile _ cookieconsistency _ binding resources of given name . */
public static appfwprofile_cookieconsistency_binding [ ] get ( nitro_service service , String name ) throws Exception { } }
|
appfwprofile_cookieconsistency_binding obj = new appfwprofile_cookieconsistency_binding ( ) ; obj . set_name ( name ) ; appfwprofile_cookieconsistency_binding response [ ] = ( appfwprofile_cookieconsistency_binding [ ] ) obj . get_resources ( service ) ; return response ;
|
public class Rollbar { /** * Record a debug error with custom parameters and human readable description .
 * @ param error the error .
 * @ param custom the custom data .
 * @ param description the human readable description of error . */
public void debug ( Throwable error , Map < String , Object > custom , String description ) { } }
|
// Convenience wrapper: logs at DEBUG level via the shared log() entry point.
log ( error , custom , description , Level . DEBUG ) ;
|
public class WSRdbManagedConnectionImpl { /** * Safely gets the NetworkTimeout . This method differs from the original getNetworkTimeout ( ) method
 * by returning a default value when before JDBC - 4.1 or on a driver that does not support getNetworkTimeout ( ) .
 * Support is feature - detected once and the result cached on the managed connection factory .
 * @ return The networkTimeout , or a default value if getNetworkTimeout is not supported by the driver , or
 * the current JDBC runtime is pre - JDBC - 4.1. */
public int getNetworkTimeoutSafely ( ) throws SQLException { } }
|
// Fast path: a previous call already determined the driver lacks support.
if ( ! mcf . supportsGetNetworkTimeout ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Returning default network timeout." , defaultNetworkTimeout ) ; return defaultNetworkTimeout ; } Throwable x ; try { return getNetworkTimeout ( ) ; } catch ( AbstractMethodError e ) { // If we are running pre - Java7
x = e ; } catch ( NoSuchMethodError e ) { // If the driver is pre - 4.1
x = e ; } catch ( SQLException e ) { // In case the driver is not 4.1 compliant but says it is
String sqlMessge = e . getMessage ( ) == null ? "" : e . getMessage ( ) ; if ( AdapterUtil . isUnsupportedException ( e ) ) x = e ; // try to catch any other variation of not supported , does not support , unsupported , etc .
// this is needed by several JDBC drivers , but one known driver is DataDirect OpenEdge JDBC Driver
else if ( sqlMessge . contains ( "support" ) ) x = e ; else throw e ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( this , tc , "getNetworkTimeout support false due to " + x ) ; mcf . supportsGetNetworkTimeout = false ; return defaultNetworkTimeout ;
|
public class BytecodeHelper { /** * Generates the bytecode to autobox the current value on the stack */
@ Deprecated public static boolean box ( MethodVisitor mv , Class type ) { } }
|
if ( ReflectionCache . getCachedClass ( type ) . isPrimitive && type != void . class ) { String returnString = "(" + BytecodeHelper . getTypeDescription ( type ) + ")Ljava/lang/Object;" ; mv . visitMethodInsn ( INVOKESTATIC , DTT_CLASSNAME , "box" , returnString , false ) ; return true ; } return false ;
|
public class SyntheticStorableReferenceBuilder { /** * Generates a property name which doesn ' t clash with any already defined . */
private String generateSafeMethodName ( StorableInfo info , String prefix ) { } }
|
Class type = info . getStorableType ( ) ; // Try a few times to generate a unique name . There ' s nothing special
// about choosing 100 as the limit .
int value = 0 ; for ( int i = 0 ; i < 100 ; i ++ ) { String name = prefix + value ; if ( ! methodExists ( type , name ) ) { return name ; } value = name . hashCode ( ) ; } throw new InternalError ( "Unable to create unique method name starting with: " + prefix ) ;
|
public class URLs { /** * Encodes the specified string .
* @ param str the specified string
* @ return URL encoded string */
public static String encode ( final String str ) { } }
|
try { return URLEncoder . encode ( str , "UTF-8" ) ; } catch ( final Exception e ) { LOGGER . log ( Level . WARN , "Encodes str [" + str + "] failed" , e ) ; return str ; }
|
public class PushApplicationEndpoint { /** * Update Push Application
 * @ param pushApplicationID id of { @ link PushApplication }
 * @ param updatedPushApp new info of { @ link PushApplication }
 * @ return updated { @ link PushApplication }
 * @ statuscode 204 The PushApplication updated successfully
 * @ statuscode 400 The format of the client request was incorrect
 * @ statuscode 404 The requested PushApplication resource does not exist
 * Only the name and description are updatable ; other submitted fields are ignored . */
@ PUT @ Path ( "/{pushAppID}" ) @ Consumes ( MediaType . APPLICATION_JSON ) @ Produces ( MediaType . APPLICATION_JSON ) public Response updatePushApplication ( @ PathParam ( "pushAppID" ) String pushApplicationID , PushApplication updatedPushApp ) { } }
|
// Lookup is scoped to the current developer, so foreign apps appear as 404.
PushApplication pushApp = getSearch ( ) . findByPushApplicationIDForDeveloper ( pushApplicationID ) ; if ( pushApp != null ) { // some validation
try { validateModelClass ( updatedPushApp ) ; } catch ( ConstraintViolationException cve ) { logger . info ( "Unable to update Push Application '{}'" , pushApplicationID ) ; logger . debug ( "Details: {}" , cve ) ; // Build and return the 400 ( Bad Request ) response
ResponseBuilder builder = createBadRequestResponse ( cve . getConstraintViolations ( ) ) ; return builder . build ( ) ; } // update name / desc :
pushApp . setDescription ( updatedPushApp . getDescription ( ) ) ; pushApp . setName ( updatedPushApp . getName ( ) ) ; logger . trace ( "Invoke service to update a push application" ) ; pushAppService . updatePushApplication ( pushApp ) ; return Response . noContent ( ) . build ( ) ; } return Response . status ( Status . NOT_FOUND ) . entity ( "Could not find requested PushApplicationEntity" ) . build ( ) ;
public class CodeBuilder { /** * creation style instructions */
public void newObject ( TypeDesc type ) { } }
|
if ( type . isArray ( ) ) { newObject ( type , 1 ) ; } else { ConstantInfo info = mCp . addConstantClass ( type ) ; addCode ( 1 , Opcode . NEW , info ) ; }
|
public class StandardDirectoryAgentServer { /** * Handles unicast UDP AttrRqst message arrived to this directory agent .
 * < br / >
 * This directory agent will reply with a list of attributes of matching services .
 * @ param attrRqst the AttrRqst message to handle
 * @ param localAddress the socket address the message arrived to
 * @ param remoteAddress the socket address the message was sent from */
protected void handleUDPAttrRqst ( AttrRqst attrRqst , InetSocketAddress localAddress , InetSocketAddress remoteAddress ) { } }
|
// Match scopes , RFC 2608 , 11.1
// Requests whose scopes don't weakly match ours are rejected with SCOPE_NOT_SUPPORTED;
// otherwise the matching attributes are returned in an AttrRply.
if ( ! scopes . weakMatch ( attrRqst . getScopes ( ) ) ) { udpAttrRply . perform ( localAddress , remoteAddress , attrRqst , SLPError . SCOPE_NOT_SUPPORTED ) ; return ; } Attributes attributes = matchAttributes ( attrRqst ) ; if ( logger . isLoggable ( Level . FINE ) ) logger . fine ( "DirectoryAgent " + this + " returning attributes for service " + attrRqst . getURL ( ) + ": " + attributes . asString ( ) ) ; udpAttrRply . perform ( localAddress , remoteAddress , attrRqst , attributes ) ;
|
public class Clock { /** * Defines if the date of the clock will be drawn .
* @ param VISIBLE */
public void setDateVisible ( final boolean VISIBLE ) { } }
|
if ( null == dateVisible ) { _dateVisible = VISIBLE ; fireUpdateEvent ( VISIBILITY_EVENT ) ; } else { dateVisible . set ( VISIBLE ) ; }
|
public class SQLParser { /** * Build a pattern segment to recognize all the ALLOW or PARTITION modifier clauses
* of a CREATE PROCEDURE statement .
* @ return Pattern to be used by the caller inside a CREATE PROCEDURE pattern .
* Capture groups :
* ( 1 ) All ALLOW / PARTITION modifier clauses as one string */
static SQLPatternPart unparsedProcedureModifierClauses ( ) { } }
|
// Force the leading space to go inside the repeat block .
return SPF . capture ( SPF . repeat ( makeInnerProcedureModifierClausePattern ( false ) ) ) . withFlags ( SQLPatternFactory . ADD_LEADING_SPACE_TO_CHILD ) ;
|
public class RpcHelper { /** * Uses idToken to retrieve the user account information from GITkit service .
* @ param idToken */
public JSONObject getAccountInfo ( String idToken ) throws GitkitClientException , GitkitServerException { } }
|
try { // Uses idToken to make the server call to GITKit
JSONObject params = new JSONObject ( ) . put ( "idToken" , idToken ) ; return invokeGoogle2LegOauthApi ( "getAccountInfo" , params ) ; } catch ( JSONException e ) { throw new GitkitServerException ( "OAuth API failed" ) ; }
|
public class Dct {
    /**
     * Setter for timexId - sets the value into the timexId feature of this
     * annotation's underlying CAS feature structure.
     *
     * @generated
     * @param v value to set into the feature
     */
    public void setTimexId(String v) {
        // Generated UIMA guard: fail fast if the type system was loaded
        // without the timexId feature.
        if (Dct_Type.featOkTst && ((Dct_Type) jcasType).casFeat_timexId == null)
            jcasType.jcas.throwFeatMissing("timexId", "de.unihd.dbs.uima.types.heideltime.Dct");
        // Write the string value via the low-level CAS API.
        jcasType.ll_cas.ll_setStringValue(addr, ((Dct_Type) jcasType).casFeatCode_timexId, v);
    }
}
|
public class ConfigTool { /** * Processes the decoration model , acquiring the skin and page
* configuration .
* The decoration model are the contents of the site . xml file .
* @ param model
* decoration data */
private final void processDecoration ( final DecorationModel model ) { } }
|
final Object customObj ; // Object for the < custom > node
final Xpp3Dom customNode ; // < custom > node
final Xpp3Dom skinNode ; // < skinConfig > node
customObj = model . getCustom ( ) ; if ( customObj instanceof Xpp3Dom ) { // This is the < custom > node in the site . xml file
customNode = ( Xpp3Dom ) customObj ; // Acquires < skinConfig > node
skinNode = customNode . getChild ( ConfigToolConstants . SKIN_KEY ) ; if ( skinNode == null ) { setSkinConfig ( new Xpp3Dom ( "" ) ) ; } else { setSkinConfig ( skinNode ) ; } }
|
public class FileSystem { /** * Opens an FSDataOutputStream at the indicated Path with write - progress
* reporting . Same as create ( ) , except fails if parent directory doesn ' t
* already exist .
* @ param f the file name to open
* @ param overwrite if a file with this name already exists , then if true ,
* the file will be overwritten , and if false an error will be thrown .
* @ param bufferSize the size of the buffer to be used .
* @ param replication required block replication for the file .
* @ param blockSize
* @ param progress
* @ throws IOException
* @ see # setPermission ( Path , FsPermission )
* @ deprecated API only for 0.20 - append */
@ Deprecated public FSDataOutputStream createNonRecursive ( Path f , boolean overwrite , int bufferSize , short replication , long blockSize , Progressable progress ) throws IOException { } }
|
return this . createNonRecursive ( f , FsPermission . getDefault ( ) , overwrite , bufferSize , replication , blockSize , progress ) ;
|
public class JDBCConnection {
    /**
     * Is called from within nativeSQL when the start of a JDBC escape sequence
     * ('{') is encountered. Blanks out the recognized escape keyword in the
     * working buffer and returns the position from which scanning continues.
     *
     * @param sql the original SQL text being scanned
     * @param sb  mutable working copy of sql whose escape tokens are
     *            overwritten with spaces in place
     * @param i   index of the character that opened the escape sequence
     * @return the updated scan index
     * @throws SQLException if the token after '{' is not one of the supported
     *         escape keywords (fn/oj/ts/d/t/call/?=call/escape)
     */
    private int onStartEscapeSequence(String sql, StringBuffer sb, int i) throws SQLException {
        // Blank out the opening brace itself, then skip whitespace after it.
        sb.setCharAt(i++, ' ');
        i = StringUtil.skipSpaces(sql, i);
        if (sql.regionMatches(true, i, "fn ", 0, 3)
                || sql.regionMatches(true, i, "oj ", 0, 3)
                || sql.regionMatches(true, i, "ts ", 0, 3)) {
            // Two-letter keywords: blank both characters.
            sb.setCharAt(i++, ' ');
            sb.setCharAt(i++, ' ');
        } else if (sql.regionMatches(true, i, "d ", 0, 2)
                || sql.regionMatches(true, i, "t ", 0, 2)) {
            // Single-letter keywords: blank the one character.
            sb.setCharAt(i++, ' ');
        } else if (sql.regionMatches(true, i, "call ", 0, 5)) {
            // "call" stays in the buffer; just advance past it.
            i += 4;
        } else if (sql.regionMatches(true, i, "?= call ", 0, 8)) {
            // Blank the "?=" pair, then advance past " call".
            sb.setCharAt(i++, ' ');
            sb.setCharAt(i++, ' ');
            i += 5;
        } else if (sql.regionMatches(true, i, "escape ", 0, 7)) {
            // "escape" stays in the buffer; just advance past it.
            i += 6;
        } else {
            // Unknown escape keyword: rewind one position so the error message
            // reports the offending remainder of the statement.
            i--;
            throw Util.sqlException(Error.error(ErrorCode.JDBC_CONNECTION_NATIVE_SQL, sql.substring(i)));
        }
        return i;
    }
}
|
public class FilePolicyIndex { /** * ( non - Javadoc )
* @ seemelcoe . xacml . pdp . data . Index # getPolicies ( org . jboss . security . xacml . sunxacml .
* EvaluationCtx ) */
@ Override public Map < String , AbstractPolicy > getPolicies ( EvaluationCtx eval , PolicyFinder policyFinder ) throws PolicyIndexException { } }
|
// no indexing , return everything
// return a copy , otherwise the map could change during evaluation if policies are added , deleted etc
readLock . lock ( ) ; try { Map < String , AbstractPolicy > result = new ConcurrentHashMap < String , AbstractPolicy > ( ) ; for ( String id : policies . keySet ( ) ) { AbstractPolicy policy = handleDocument ( m_policyReader . readPolicy ( policies . get ( id ) ) , policyFinder ) ; result . put ( id , policy ) ; } return result ; } catch ( ParsingException pe ) { throw new PolicyIndexException ( pe . getMessage ( ) , pe ) ; } finally { readLock . unlock ( ) ; }
|
public class ArrayUtils { /** * Returns an { @ link Iterator } iterating over the elements in the array .
* @ param < T > Class type of the elements in the array .
* @ param array array to iterate .
* @ return an { @ link Iterator } to iterate over the elements in the array
* or an empty { @ link Iterator } if the array is null or empty .
* @ see java . util . Iterator */
@ NullSafe @ SafeVarargs public static < T > Iterator < T > asIterator ( T ... array ) { } }
|
return ( array == null ? Collections . emptyIterator ( ) : new Iterator < T > ( ) { private int index = 0 ; @ Override public boolean hasNext ( ) { return ( index < array . length ) ; } @ Override public T next ( ) { Assert . isTrue ( hasNext ( ) , new NoSuchElementException ( "No more elements" ) ) ; return array [ index ++ ] ; } } ) ;
|
public class DiscordWebSocketAdapter {
    /**
     * Adds a server id to be queued for the "request guild members" packet.
     * Only the server's id is stored; the packet itself is assembled and sent
     * elsewhere.
     *
     * @param server The server.
     */
    public void queueRequestGuildMembers(Server server) {
        logger.debug("Queued {} for request guild members packet", server);
        requestGuildMembersQueue.add(server.getId());
    }
}
|
public class ComponentBindingsValuesProvider { /** * ( non - Javadoc )
* @ see
* org . apache . sling . scripting . api . BindingsValuesProvider # addBindings ( javax
* . script . Bindings ) */
@ SuppressWarnings ( "unchecked" ) // Suppressing warnings since Commons Collections is gangster . . .
@ Override public void addBindings ( Bindings bindings ) { } }
|
try { SlingHttpServletRequest request = ( SlingHttpServletRequest ) bindings . get ( "request" ) ; String resourceType = request . getResource ( ) . getResourceType ( ) ; Collection < ComponentBindingsProvider > cis = cif . getComponentBindingsProviders ( resourceType ) ; if ( cis != null && cis . size ( ) > 0 ) { CQVariables variables = variablesService . getVariables ( bindings ) ; for ( ComponentBindingsProvider ci : cis ) { try { log . debug ( "Invoking component bindings provider {}" , ci ) ; ci . addBindings ( variables , bindings ) ; } catch ( Exception e ) { log . error ( "Exception invoking component bindings provider " + ci , e ) ; recentExceptions . add ( new ComponentBindingsExceptionData ( ci , e ) ) ; } } } } catch ( Exception e ) { log . error ( "Exception invoking component binding providers" , e ) ; }
|
public class JournalNode { /** * Get the list of journal addresses to connect .
* Consistent with the QuorumJournalManager . getHttpAddresses . */
static List < InetSocketAddress > getJournalHttpAddresses ( Configuration conf ) { } }
|
String [ ] hosts = JournalConfigHelper . getJournalHttpHosts ( conf ) ; List < InetSocketAddress > addrs = new ArrayList < InetSocketAddress > ( ) ; for ( String host : hosts ) { addrs . add ( NetUtils . createSocketAddr ( host ) ) ; } return addrs ;
|
public class WSKeyStore { /** * Query the password of a key entry in this keystore .
* @ param alias
* @ return */
private SerializableProtectedString getKeyPassword ( String alias ) { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "getKeyPassword " + alias ) ; SerializableProtectedString keyPass = certAliasInfo . get ( alias ) ; if ( keyPass != null ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "getKeyPassword entry found." ) ; } else { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "getKeyPassword -> null" ) ; } return keyPass ;
|
public class DeterministicKey {
    /**
     * Return the fingerprint of this key's parent as an int value, or zero if
     * this key is the root node of the key hierarchy. Raise an exception if the
     * arguments are inconsistent. This method exists to avoid code repetition
     * in the constructors.
     *
     * @param parentKey the parent key passed to the constructor
     *                  (NOTE(review): this parameter is not read here — the
     *                  field {@code parent} is consulted instead; presumably
     *                  the constructor assigns it before calling this method —
     *                  confirm against the callers)
     * @param parentFingerprint the caller-supplied parent fingerprint, 0 for a root key
     * @return {@code parentFingerprint} when non-zero, otherwise 0
     * @throws IllegalArgumentException if a parent is set and its fingerprint
     *         does not equal {@code parentFingerprint}
     */
    private int ascertainParentFingerprint(DeterministicKey parentKey, int parentFingerprint) throws IllegalArgumentException {
        if (parentFingerprint != 0) {
            // Cross-check the supplied fingerprint against the actual parent's,
            // when a parent is present.
            if (parent != null)
                checkArgument(parent.getFingerprint() == parentFingerprint,
                        "parent fingerprint mismatch",
                        Integer.toHexString(parent.getFingerprint()), Integer.toHexString(parentFingerprint));
            return parentFingerprint;
        } else return 0;
    }
}
|
public class EvaluatorRegistry {
    /**
     * Adds an evaluator definition class to the registry using the evaluator
     * class name. The class will be loaded and the corresponding evaluator ID
     * will be added to the registry. In case there exists an implementation
     * for that ID already, the new implementation will replace the previous
     * one.
     *
     * @param className the name of the class for the implementation definition.
     *                  The class must implement the EvaluatorDefinition
     *                  interface.
     * @throws RuntimeException wrapping the reflection failure if the class
     *         cannot be found, instantiated, or accessed
     */
    // NOTE(review): Class.newInstance() is deprecated in modern Java in favor
    // of getDeclaredConstructor().newInstance(); left unchanged here because
    // switching would alter the thrown exception types.
    @SuppressWarnings("unchecked")
    public void addEvaluatorDefinition(String className) {
        try {
            Class<EvaluatorDefinition> defClass = (Class<EvaluatorDefinition>) this.classloader.loadClass(className);
            EvaluatorDefinition def = defClass.newInstance();
            // Delegate to the instance-based overload, which performs the
            // actual ID registration/replacement.
            addEvaluatorDefinition(def);
        } catch (ClassNotFoundException e) {
            throw new RuntimeException("Class not found for evaluator definition: " + className, e);
        } catch (InstantiationException e) {
            throw new RuntimeException("Error instantiating class for evaluator definition: " + className, e);
        } catch (IllegalAccessException e) {
            throw new RuntimeException("Illegal access instantiating class for evaluator definition: " + className, e);
        }
    }
}
|
public class TextureVideoView {
    /**
     * Clears the surface texture by attaching a GL context and clearing it.
     * Code taken from <a href="http://stackoverflow.com/a/31582209">Hugo
     * Gresse's answer on stackoverflow.com</a>.
     *
     * The EGL setup/teardown sequence below is strictly order-dependent: the
     * surface must be current before clearing, and must be released before the
     * context and display are destroyed.
     */
    private void clearSurface() {
        // Nothing to clear without a surface; EGL use here requires Jelly Bean+.
        if (mSurface == null || Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
            return;
        }
        EGL10 egl = (EGL10) EGLContext.getEGL();
        EGLDisplay display = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
        egl.eglInitialize(display, null);
        // RGBA8888 window-renderable config request.
        int[] attribList = {
                EGL10.EGL_RED_SIZE, 8,
                EGL10.EGL_GREEN_SIZE, 8,
                EGL10.EGL_BLUE_SIZE, 8,
                EGL10.EGL_ALPHA_SIZE, 8,
                EGL10.EGL_RENDERABLE_TYPE, EGL10.EGL_WINDOW_BIT,
                EGL10.EGL_NONE, 0, // placeholder for recordable [@-3]
                EGL10.EGL_NONE };
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        egl.eglChooseConfig(display, attribList, configs, configs.length, numConfigs);
        EGLConfig config = configs[0];
        // 12440 is EGL_CONTEXT_CLIENT_VERSION; value 2 requests an OpenGL ES 2 context.
        EGLContext context = egl.eglCreateContext(display, config, EGL10.EGL_NO_CONTEXT, new int[] { 12440, 2, EGL10.EGL_NONE });
        EGLSurface eglSurface = egl.eglCreateWindowSurface(display, config, mSurface, new int[] { EGL10.EGL_NONE });
        egl.eglMakeCurrent(display, eglSurface, eglSurface, context);
        // Clear to opaque black and present the cleared buffer.
        GLES20.glClearColor(0, 0, 0, 1);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        egl.eglSwapBuffers(display, eglSurface);
        // Tear down everything that was created, in reverse order.
        egl.eglDestroySurface(display, eglSurface);
        egl.eglMakeCurrent(display, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
        egl.eglDestroyContext(display, context);
        egl.eglTerminate(display);
    }
}
|
public class ListActiveElementsCommand {
    /**
     * ----- private methods -----
     *
     * Recursively walks the DOM subtree rooted at {@code root}, accumulating
     * every active element into {@code resultList}. Data keys collected along
     * the path are carried down so a child's activity check can see its
     * ancestors' repeater keys.
     *
     * @param resultList     accumulator for the active elements found
     * @param root           node currently being inspected
     * @param parentDataKeys data keys collected on the path above root (not mutated)
     * @param parent         id of the closest Query-state ancestor, or the initial parent id
     * @param depth          data-centric depth of root's parent
     */
    private void collectActiveElements(final List<GraphObject> resultList, final DOMNode root, final Set<String> parentDataKeys, final String parent, final int depth) {
        final String childDataKey = root.getDataKey();
        // Copy the inherited keys so siblings do not see each other's additions.
        final Set<String> dataKeys = new LinkedHashSet<>(parentDataKeys);
        String parentId = parent;
        int dataCentricDepth = depth;
        if (!StringUtils.isEmpty(childDataKey)) {
            // This node introduces its own data key: deepen the data-centric depth.
            dataKeys.add(childDataKey);
            dataCentricDepth++;
        }
        final ActiveElementState state = isActive(root, dataKeys);
        if (!state.equals(ActiveElementState.None)) {
            resultList.add(extractActiveElement(root, dataKeys, parentId, state, depth));
            if (state.equals(ActiveElementState.Query)) {
                // Query nodes become the parent for everything beneath them.
                parentId = root.getUuid();
            }
        }
        for (final DOMNode child : root.getChildren()) {
            collectActiveElements(resultList, child, dataKeys, parentId, dataCentricDepth);
        }
    }
}
|
public class CmsAdminMenu { /** * Adds a menu item at the given position . < p >
* @ param group the group
* @ param position the position
* @ see CmsIdentifiableObjectContainer # addIdentifiableObject ( String , Object , float ) */
public void addGroup ( CmsAdminMenuGroup group , float position ) { } }
|
m_groupContainer . addIdentifiableObject ( group . getName ( ) , group , position ) ;
|
public class GoogleMapShapeConverter {
    /**
     * Convert a list of List&lt;LatLng&gt; to a {@link MultiLineString}.
     *
     * @param polylineList polyline list
     * @return multi line string
     */
    public MultiLineString toMultiLineStringFromList(List<List<LatLng>> polylineList) {
        // Delegates to the full overload with both boolean options disabled
        // (presumably the hasZ / hasM coordinate flags — confirm against the
        // overload's signature).
        return toMultiLineStringFromList(polylineList, false, false);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.