signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class GetCloudFormationStackRecordsRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( GetCloudFormationStackRecordsRequest getCloudFormationStackRecordsRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( getCloudFormationStackRecordsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getCloudFormationStackRecordsRequest . getPageToken ( ) , PAGETOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class XMLStreamReaderAsync { /** * / * Public methods */
@ Override public ISynchronizationPoint < Exception > start ( ) { } }
|
SynchronizationPoint < Exception > sp = new SynchronizationPoint < > ( ) ; io . canStartReading ( ) . listenAsync ( new Task . Cpu . FromRunnable ( ( ) -> { try { Starter start = new Starter ( io , defaultEncoding , charactersBuffersSize , maxBuffers ) ; stream = start . start ( ) ; next ( ) . listenInline ( sp ) ; } catch ( Exception e ) { sp . error ( e ) ; } } , "Start parsing XML" , io . getPriority ( ) ) , true ) ; return sp ;
|
public class LogRef { /** * * Service listener entry point . Releases the log service object if * one
* has been fetched . * *
* @ param evt
* Service event */
public void serviceChanged ( ServiceEvent evt ) { } }
|
if ( evt . getServiceReference ( ) == logSR && evt . getType ( ) == ServiceEvent . UNREGISTERING ) { ungetLogService ( ) ; }
|
public class StockholmFileParser { /** * Parses a Stockholm file and returns a { @ link StockholmStructure } object with its content . This method returns
* just after reaching the end of structure delimiter line ( " / / " ) , leaving any remaining empty lines unconsumed .
* @ param scanner
* from where to read the file content
* @ return Stockholm file content , < code > null < / code > if couldn ' t or no more structures .
* @ throws IOException
* @ throws Exception */
StockholmStructure parse ( Scanner scanner ) throws ParserException , IOException { } }
|
if ( scanner == null ) { if ( internalScanner != null ) { scanner = internalScanner ; } else { throw new IllegalArgumentException ( "No Scanner defined" ) ; } } String line = null ; int linesCount = 0 ; try { while ( scanner . hasNextLine ( ) ) { line = scanner . nextLine ( ) ; // if the file is empty
// this condition will not happen , just left in case we decided to go for buffereedReader again for
// performance purpose .
if ( linesCount == 0 && line == null ) { throw new IOException ( "Could not parse Stockholm file, BufferedReader returns null!" ) ; } // ignore empty lines
if ( ( /* status = = STATUS _ INSIDE _ FILE & & */
line == null ) || line . trim ( ) . length ( ) == 0 ) { continue ; } if ( line . startsWith ( "#=G" ) ) { // / / comment line or metadata
// line = line . substring ( 1 ) . trim ( ) ;
// line = line . substring ( 1 ) . trim ( ) ;
if ( line . startsWith ( GENERIC_PER_FILE_ANNOTATION , 2 ) ) { // # = GF < featurename > < generic per - file annotation , free text >
int firstSpaceIndex = line . indexOf ( ' ' , 5 ) ; String featureName = line . substring ( 5 , firstSpaceIndex ) ; String value = line . substring ( firstSpaceIndex ) . trim ( ) ; handleFileAnnotation ( featureName , value ) ; } else if ( line . startsWith ( GENERIC_PER_CONSENSUS_ANNOTATION , 2 ) ) { // Being in a consensus means we are no longer in a sequence .
// this . status = STATUS _ INSIDE _ FILE ;
// # = GC < featurename > < generic per - column annotation , exactly 1 char per column >
int firstSpaceIndex = line . indexOf ( ' ' , 5 ) ; String featureName = line . substring ( 5 , firstSpaceIndex ) ; String value = line . substring ( firstSpaceIndex ) . trim ( ) ; handleConsensusAnnotation ( featureName , value ) ; } else if ( line . startsWith ( GENERIC_PER_SEQUENCE_ANNOTATION , 2 ) ) { // # = GS < seqname > < featurename > < generic per - sequence annotation , free text >
int index1 = line . indexOf ( ' ' , 5 ) ; String seqName = line . substring ( 5 , index1 ) ; while ( line . charAt ( ++ index1 ) <= ' ' ) // i . e . white space
; // keep advancing
int index2 = line . indexOf ( ' ' , index1 ) ; String featureName = line . substring ( index1 , index2 ) ; String value = line . substring ( index2 ) . trim ( ) ; handleSequenceAnnotation ( seqName , featureName , value ) ; } else if ( line . startsWith ( GENERIC_PER_RESIDUE_ANNOTATION , 2 ) ) { // # = GR < seqname > < featurename > < generic per - sequence AND per - column mark - up , exactly 1
// character per column >
int index1 = line . indexOf ( ' ' , 5 ) ; String seqName = line . substring ( 5 , index1 ) ; while ( line . charAt ( ++ index1 ) == ' ' ) ; // keep advancing
int index2 = line . indexOf ( ' ' , index1 ) ; String featureName = line . substring ( index1 , index2 ) ; String value = line . substring ( index2 ) . trim ( ) ; handleResidueAnnotation ( seqName , featureName , value ) ; } } else if ( line . startsWith ( "# STOCKHOLM" ) ) { // it is the header line
// if ( status = = STATUS _ OUTSIDE _ FILE ) {
// status = STATUS _ INSIDE _ FILE ;
// String [ ] header = line . split ( " \ \ s + " ) ;
// this . stockholmStructure = new StockholmStructure ( ) ;
// this . stockholmStructure . getFileAnnotation ( ) . setFormat ( header [ 1 ] ) ;
// this . stockholmStructure . getFileAnnotation ( ) . setVersion ( header [ 2 ] ) ;
// } else {
// throw new ParserException ( " Uexpected Format line : [ " + line + " ] " ) ;
String [ ] header = line . split ( "\\s+" ) ; this . stockholmStructure = new StockholmStructure ( ) ; this . stockholmStructure . getFileAnnotation ( ) . setFormat ( header [ 1 ] ) ; this . stockholmStructure . getFileAnnotation ( ) . setVersion ( header [ 2 ] ) ; } else if ( line . trim ( ) . equals ( "//" ) ) { // status = STATUS _ OUTSIDE _ FILE ;
break ; // should we just break immediately or jump next empty lines ?
} else /* if ( ! line . startsWith ( " # " ) ) */
{ // most probably This line corresponds to a sequence . Something like :
// O83071/192-246 MTCRAQLIAVPRASSLAEAIACAQKMRVSRVPVYERS
// N . B . as long as we don ' t check the status now , it is somehow error prone
handleSequenceLine ( line ) ; // / / = = = = = removed status = = = = =
// if ( status = = STATUS _ IN _ SEQUENCE ) {
// / / This line corresponds to a sequence . Something like :
// / / O83071/192-246 MTCRAQLIAVPRASSLAEAIACAQKMRVSRVPVYERS
// handleSequenceLine ( line ) ;
// / / } else if ( status = = STATUS _ OUTSIDE _ FILE ) {
// / / throw new
// ParserException ( " The end of file character was allready reached but there are still sequence lines " ) ;
// } else {
// System . err . println ( " Error : Unknown or unexpected line [ " + line
// + " ] . \ nPlease contact the Biojava team . " ) ;
// throw new ParserException ( " Error : Unknown or unexpected line [ " + line + " ] . " ) ;
// / / = = = = = removed status = = = = =
} linesCount ++ ; } } catch ( IOException e ) { // TODO : Best practice is to catch or throw Exception , never both
logger . error ( "IOException: " , e ) ; throw new IOException ( "Error parsing Stockholm file" ) ; } StockholmStructure structure = this . stockholmStructure ; this . stockholmStructure = null ; if ( structure != null ) { int length = - 1 ; Map < String , StringBuffer > sequences = structure . getSequences ( ) ; for ( String sequencename : sequences . keySet ( ) ) { StringBuffer sequence = sequences . get ( sequencename ) ; if ( length == - 1 ) { length = sequence . length ( ) ; } else if ( length != sequence . length ( ) ) { throw new RuntimeException ( "Sequences have different lengths" ) ; } } } return structure ;
|
public class Vector { /** * Returns the index of the last occurrence of the specified element in
* this vector , searching backwards from { @ code index } , or returns - 1 if
* the element is not found .
* More formally , returns the highest index { @ code i } such that
* < tt > ( i & nbsp ; & lt ; = & nbsp ; index & nbsp ; & amp ; & amp ; & nbsp ; ( o = = null & nbsp ; ? & nbsp ; get ( i ) = = null & nbsp ; : & nbsp ; o . equals ( get ( i ) ) ) ) < / tt > ,
* or - 1 if there is no such index .
* @ param o element to search for
* @ param index index to start searching backwards from
* @ return the index of the last occurrence of the element at position
* less than or equal to { @ code index } in this vector ;
* - 1 if the element is not found .
* @ throws IndexOutOfBoundsException if the specified index is greater
* than or equal to the current size of this vector */
public synchronized int lastIndexOf ( Object o , int index ) { } }
|
if ( index >= elementCount ) throw new IndexOutOfBoundsException ( index + " >= " + elementCount ) ; if ( o == null ) { for ( int i = index ; i >= 0 ; i -- ) if ( elementData [ i ] == null ) return i ; } else { for ( int i = index ; i >= 0 ; i -- ) if ( o . equals ( elementData [ i ] ) ) return i ; } return - 1 ;
|
public class InternalLocaleBuilder { /** * Set extension / private subtags in a single string representation */
public InternalLocaleBuilder setExtensions ( String subtags ) throws LocaleSyntaxException { } }
|
if ( subtags == null || subtags . length ( ) == 0 ) { clearExtensions ( ) ; return this ; } subtags = subtags . replaceAll ( BaseLocale . SEP , LanguageTag . SEP ) ; StringTokenIterator itr = new StringTokenIterator ( subtags , LanguageTag . SEP ) ; List < String > extensions = null ; String privateuse = null ; int parsed = 0 ; int start ; // Make a list of extension subtags
while ( ! itr . isDone ( ) ) { String s = itr . current ( ) ; if ( LanguageTag . isExtensionSingleton ( s ) ) { start = itr . currentStart ( ) ; String singleton = s ; StringBuilder sb = new StringBuilder ( singleton ) ; itr . next ( ) ; while ( ! itr . isDone ( ) ) { s = itr . current ( ) ; if ( LanguageTag . isExtensionSubtag ( s ) ) { sb . append ( LanguageTag . SEP ) . append ( s ) ; parsed = itr . currentEnd ( ) ; } else { break ; } itr . next ( ) ; } if ( parsed < start ) { throw new LocaleSyntaxException ( "Incomplete extension '" + singleton + "'" , start ) ; } if ( extensions == null ) { extensions = new ArrayList < String > ( 4 ) ; } extensions . add ( sb . toString ( ) ) ; } else { break ; } } if ( ! itr . isDone ( ) ) { String s = itr . current ( ) ; if ( LanguageTag . isPrivateusePrefix ( s ) ) { start = itr . currentStart ( ) ; StringBuilder sb = new StringBuilder ( s ) ; itr . next ( ) ; while ( ! itr . isDone ( ) ) { s = itr . current ( ) ; if ( ! LanguageTag . isPrivateuseSubtag ( s ) ) { break ; } sb . append ( LanguageTag . SEP ) . append ( s ) ; parsed = itr . currentEnd ( ) ; itr . next ( ) ; } if ( parsed <= start ) { throw new LocaleSyntaxException ( "Incomplete privateuse:" + subtags . substring ( start ) , start ) ; } else { privateuse = sb . toString ( ) ; } } } if ( ! itr . isDone ( ) ) { throw new LocaleSyntaxException ( "Ill-formed extension subtags:" + subtags . substring ( itr . currentStart ( ) ) , itr . currentStart ( ) ) ; } return setExtensions ( extensions , privateuse ) ;
|
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getIfcFurnitureType ( ) { } }
|
if ( ifcFurnitureTypeEClass == null ) { ifcFurnitureTypeEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 262 ) ; } return ifcFurnitureTypeEClass ;
|
public class ConnectionFactoryRegistry { /** * Add a { @ link ConnectionFactory } to this registry .
* @ param connectionFactory the connection factory */
public void addConnectionFactory ( ConnectionFactory < ? > connectionFactory ) { } }
|
if ( connectionFactories . containsKey ( connectionFactory . getProviderId ( ) ) ) { throw new IllegalArgumentException ( "A ConnectionFactory for provider '" + connectionFactory . getProviderId ( ) + "' has already been registered" ) ; } Class < ? > apiType = GenericTypeResolver . resolveTypeArgument ( connectionFactory . getClass ( ) , ConnectionFactory . class ) ; if ( apiTypeIndex . containsKey ( apiType ) ) { throw new IllegalArgumentException ( "A ConnectionFactory for API [" + apiType . getName ( ) + "] has already been registered" ) ; } connectionFactories . put ( connectionFactory . getProviderId ( ) , connectionFactory ) ; apiTypeIndex . put ( apiType , connectionFactory . getProviderId ( ) ) ;
|
public class CrawlOverview { /** * Saves a screenshot of every new state . */
@ Override public void onNewState ( CrawlerContext context , StateVertex vertex ) { } }
|
LOG . debug ( "onNewState" ) ; StateBuilder state = outModelCache . addStateIfAbsent ( vertex ) ; visitedStates . putIfAbsent ( state . getName ( ) , vertex ) ; saveScreenshot ( context . getBrowser ( ) , state . getName ( ) , vertex ) ; outputBuilder . persistDom ( state . getName ( ) , context . getBrowser ( ) . getUnStrippedDom ( ) ) ;
|
public class HyperionClient { /** * Return the headers for the supplied request
* @ param request The data service request
* @ return The headers */
protected Headers getHeaders ( Request request ) { } }
|
Headers . Builder headers = new Headers . Builder ( ) ; MultiMap resolvedHeaders = null ; if ( headerFactory != null ) resolvedHeaders = headerFactory . getHeaders ( ) ; if ( hasEntries ( resolvedHeaders ) ) resolvedHeaders = resolvedHeaders . merge ( request . getHeaders ( ) ) ; else resolvedHeaders = request . getHeaders ( ) ; if ( authorizationFactory != null ) { MultiMap authEntries = authorizationFactory . getHeaders ( ) ; if ( hasEntries ( authEntries ) ) resolvedHeaders = resolvedHeaders . merge ( authEntries ) ; } if ( resolvedHeaders . getFirst ( "user-agent" ) == null ) headers . add ( "user-agent" , userAgent ) ; if ( resolvedHeaders . getFirst ( CLIENT_VERSION_HEADER_NAME ) == null ) headers . add ( CLIENT_VERSION_HEADER_NAME , getClientVersion ( ) ) ; for ( Map . Entry < String , List < String > > entry : resolvedHeaders . entries ( ) ) { for ( String value : entry . getValue ( ) ) { headers . add ( entry . getKey ( ) , value ) ; } } return headers . build ( ) ;
|
public class EnsemblRestClientFactory { /** * Create and return a new sequence service with the specified endpoint URL .
* @ since 1.3
* @ param endpointUrl endpoint URL , must not be null
* @ return a new sequence service with the specified endpoint URL */
public SequenceService createSequenceService ( final String endpointUrl ) { } }
|
return new RestAdapter . Builder ( ) . setEndpoint ( endpointUrl ) . setErrorHandler ( errorHandler ) . setConverter ( new JacksonSequenceConverter ( jsonFactory ) ) . build ( ) . create ( SequenceService . class ) ;
|
public class Utils { /** * Returns the given date time formatted using the given format and timezone .
* @ param dt The date to format ( in milliseconds )
* @ param tz The timezone for the date ( or null )
* @ param format The format to use for the date
* @ return The formatted date */
static public String getFormattedDateTime ( long dt , TimeZone tz , String format ) { } }
|
SimpleDateFormat df = new SimpleDateFormat ( format ) ; if ( tz != null ) df . setTimeZone ( tz ) ; return df . format ( new Date ( dt ) ) ;
|
public class InputStreamOrByteBufferAdapter { /** * Read a long at a specific offset ( without changing the current read point ) .
* @ param offset
* The buffer offset to read from .
* @ return The long at the buffer offset .
* @ throws IOException
* If there was an exception while reading . */
public long readLong ( final int offset ) throws IOException { } }
|
final int bytesToRead = Math . max ( 0 , offset + 8 - used ) ; if ( bytesToRead > 0 ) { readMore ( bytesToRead ) ; } return ( ( buf [ offset ] & 0xffL ) << 56 ) | ( ( buf [ offset + 1 ] & 0xffL ) << 48 ) | ( ( buf [ offset + 2 ] & 0xffL ) << 40 ) | ( ( buf [ offset + 3 ] & 0xffL ) << 32 ) | ( ( buf [ offset + 4 ] & 0xffL ) << 24 ) | ( ( buf [ offset + 5 ] & 0xffL ) << 16 ) | ( ( buf [ offset + 6 ] & 0xffL ) << 8 ) | ( buf [ offset + 7 ] & 0xffL ) ;
|
public class RosterExchangeManager { /** * Fires roster exchange listeners . */
private void fireRosterExchangeListeners ( Jid from , Iterator < RemoteRosterEntry > remoteRosterEntries ) { } }
|
RosterExchangeListener [ ] listeners ; synchronized ( rosterExchangeListeners ) { listeners = new RosterExchangeListener [ rosterExchangeListeners . size ( ) ] ; rosterExchangeListeners . toArray ( listeners ) ; } for ( RosterExchangeListener listener : listeners ) { listener . entriesReceived ( from , remoteRosterEntries ) ; }
|
public class Distribution { /** * < pre >
* Must be in increasing order of ` value ` field .
* < / pre >
* < code > repeated . google . api . Distribution . Exemplar exemplars = 10 ; < / code > */
public java . util . List < ? extends com . google . api . Distribution . ExemplarOrBuilder > getExemplarsOrBuilderList ( ) { } }
|
return exemplars_ ;
|
public class WebcamLogConfigurator { /** * Configure SLF4J .
* @ param is input stream to logback configuration xml */
public static void configure ( InputStream is ) { } }
|
ClassLoader cl = Thread . currentThread ( ) . getContextClassLoader ( ) ; try { String [ ] names = { "ch.qos.logback.classic.LoggerContext" , "ch.qos.logback.classic.joran.JoranConfigurator" , } ; for ( String name : names ) { Class . forName ( name , false , cl ) ; } ch . qos . logback . classic . LoggerContext context = ( ch . qos . logback . classic . LoggerContext ) LoggerFactory . getILoggerFactory ( ) ; ch . qos . logback . classic . joran . JoranConfigurator configurator = new ch . qos . logback . classic . joran . JoranConfigurator ( ) ; configurator . setContext ( context ) ; context . reset ( ) ; configurator . doConfigure ( is ) ; } catch ( ClassNotFoundException e ) { System . err . println ( "WLogC: Logback JARs are missing in classpath" ) ; } catch ( NoClassDefFoundError e ) { System . err . println ( "WLogC: Logback JARs are missing in classpath" ) ; } catch ( Throwable e ) { e . printStackTrace ( ) ; }
|
public class PinView { /** * Clear PinBoxes values */
public void clear ( ) { } }
|
for ( int i = 0 ; i < mNumberPinBoxes ; i ++ ) { getPinBox ( i ) . getText ( ) . clear ( ) ; } checkPinBoxesAvailableOrder ( ) ;
|
public class AbsFilesScanner { /** * / * list */
public List < T > list ( File [ ] files , int start , int count ) { } }
|
final List < T > results = new ArrayList < > ( ) ; scan ( files , start , count , new Function < File , Void > ( ) { @ Override public Void apply ( File file ) { results . add ( file2list ( file ) ) ; return null ; } } ) ; return results ;
|
public class PoolStatisticsImpl { /** * Add delta to total usage time
* @ param delta The value */
public void deltaTotalUsageTime ( long delta ) { } }
|
if ( enabled . get ( ) && delta > 0 ) { totalUsageTime . addAndGet ( delta ) ; totalUsageTimeInvocations . incrementAndGet ( ) ; if ( delta > maxUsageTime . get ( ) ) maxUsageTime . set ( delta ) ; }
|
public class CmsJspImageBean { /** * Adds a number of size variations to the source set . < p >
* In case the screen size is not really known , it may be a good idea to add
* some variations for large images to make sure there are some common options in case the basic
* image is very large . < p >
* @ param minWidth the minimum image width to add size variations for
* @ param maxWidth the maximum width size variation to create */
public void addSrcSetWidthVariants ( int minWidth , int maxWidth ) { } }
|
int imageWidth = getWidth ( ) ; if ( imageWidth > minWidth ) { // only add variants in case the image is larger then the given minimum
int srcSetMaxWidth = getSrcSetMaxWidth ( ) ; for ( double factor : m_sizeVariants ) { long width = Math . round ( imageWidth * factor ) ; if ( width > srcSetMaxWidth ) { if ( width <= maxWidth ) { setSrcSets ( createWidthVariation ( String . valueOf ( width ) ) ) ; } } else { break ; } } }
|
public class RottenTomatoesApi { /** * Detailed information on a specific movie specified by Id .
* @ param movieId RT Movie ID to locate
* @ return
* @ throws RottenTomatoesException */
public RTMovie getDetailedInfo ( int movieId ) throws RottenTomatoesException { } }
|
properties . clear ( ) ; properties . put ( ApiBuilder . PROPERTY_ID , String . valueOf ( movieId ) ) ; properties . put ( ApiBuilder . PROPERTY_URL , URL_MOVIES_INFO ) ; return response . getResponse ( RTMovie . class , properties ) ;
|
public class EmbeddableTransactionImpl { /** * Called by interceptor when incoming reply arrives .
* This polices the single threaded operation of the transaction . */
@ Override public void resumeAssociation ( ) { } }
|
final boolean traceOn = TraceComponent . isAnyTracingEnabled ( ) ; if ( traceOn && tc . isEntryEnabled ( ) ) Tr . entry ( tc , "resumeAssociation" ) ; resumeAssociation ( true ) ; if ( traceOn && tc . isEntryEnabled ( ) ) Tr . exit ( tc , "resumeAssociation" ) ;
|
public class CommercePriceEntryPersistenceImpl { /** * Returns the commerce price entry where commercePriceListId = & # 63 ; and CPInstanceUuid = & # 63 ; or throws a { @ link NoSuchPriceEntryException } if it could not be found .
* @ param commercePriceListId the commerce price list ID
* @ param CPInstanceUuid the cp instance uuid
* @ return the matching commerce price entry
* @ throws NoSuchPriceEntryException if a matching commerce price entry could not be found */
@ Override public CommercePriceEntry findByC_C ( long commercePriceListId , String CPInstanceUuid ) throws NoSuchPriceEntryException { } }
|
CommercePriceEntry commercePriceEntry = fetchByC_C ( commercePriceListId , CPInstanceUuid ) ; if ( commercePriceEntry == null ) { StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "commercePriceListId=" ) ; msg . append ( commercePriceListId ) ; msg . append ( ", CPInstanceUuid=" ) ; msg . append ( CPInstanceUuid ) ; msg . append ( "}" ) ; if ( _log . isDebugEnabled ( ) ) { _log . debug ( msg . toString ( ) ) ; } throw new NoSuchPriceEntryException ( msg . toString ( ) ) ; } return commercePriceEntry ;
|
public class DefaultLdapAccountStateHandler { /** * Handle an account state warning produced by ldaptive account state machinery .
* Override this method to provide custom warning message handling .
* @ param warning the account state warning messages .
* @ param response Ldaptive authentication response .
* @ param configuration Password policy configuration .
* @ param messages Container for messages produced by account state warning handling . */
protected void handleWarning ( final AccountState . Warning warning , final AuthenticationResponse response , final PasswordPolicyConfiguration configuration , final List < MessageDescriptor > messages ) { } }
|
LOGGER . debug ( "Handling account state warning [{}]" , warning ) ; if ( warning == null ) { LOGGER . debug ( "Account state warning not defined" ) ; return ; } if ( warning . getExpiration ( ) != null ) { val expDate = DateTimeUtils . zonedDateTimeOf ( warning . getExpiration ( ) ) ; val ttl = ZonedDateTime . now ( ZoneOffset . UTC ) . until ( expDate , ChronoUnit . DAYS ) ; LOGGER . debug ( "Password expires in [{}] days. Expiration warning threshold is [{}] days." , ttl , configuration . getPasswordWarningNumberOfDays ( ) ) ; if ( configuration . isAlwaysDisplayPasswordExpirationWarning ( ) || ttl < configuration . getPasswordWarningNumberOfDays ( ) ) { messages . add ( new PasswordExpiringWarningMessageDescriptor ( "Password expires in {0} days." , ttl ) ) ; } } else { LOGGER . debug ( "No account expiration warning was provided as part of the account state" ) ; } if ( warning . getLoginsRemaining ( ) > 0 ) { messages . add ( new DefaultMessageDescriptor ( "password.expiration.loginsRemaining" , "You have {0} logins remaining before you MUST change your password." , new Serializable [ ] { warning . getLoginsRemaining ( ) } ) ) ; }
|
public class DomUtils { /** * To get all the textual content in the dom
* @ param document
* @ param individualTokens : default True : when set to true , each text node from dom is used to build the
* text content : when set to false , the text content of whole is obtained at once .
* @ return */
public static String getTextContent ( Document document , boolean individualTokens ) { } }
|
String textContent = null ; if ( individualTokens ) { List < String > tokens = getTextTokens ( document ) ; textContent = StringUtils . join ( tokens , "," ) ; } else { textContent = document . getDocumentElement ( ) . getTextContent ( ) . trim ( ) . replaceAll ( "\\s+" , "," ) ; } return textContent ;
|
public class HttpJsonRpcClient { /** * Create a new instance of { @ link OkHttpClient } . Sub - class my override this method to customize
* the { @ link OkHttpClient } instance .
* @ return
* @ since 0.9.1.6 */
protected OkHttpClient buildHttpClient ( ) { } }
|
OkHttpClient . Builder builder = new OkHttpClient . Builder ( ) . readTimeout ( readTimeoutMs , TimeUnit . MILLISECONDS ) . writeTimeout ( writeTimeoutMs , TimeUnit . MILLISECONDS ) ; OkHttpClient client = builder . build ( ) ; return client ;
|
public class Util { /** * write a command to the output stream */
static void write ( Command cmd , OutputStream out ) throws UnsupportedEncodingException , IOException { } }
|
encode ( cmd . getCommand ( ) , out ) ; for ( Parameter param : cmd . getParameters ( ) ) { encode ( String . format ( "=%s=%s" , param . getName ( ) , param . hasValue ( ) ? param . getValue ( ) : "" ) , out ) ; } String tag = cmd . getTag ( ) ; if ( ( tag != null ) && ! tag . equals ( "" ) ) { encode ( String . format ( ".tag=%s" , tag ) , out ) ; } List < String > props = cmd . getProperties ( ) ; if ( ! props . isEmpty ( ) ) { StringBuilder buf = new StringBuilder ( "=.proplist=" ) ; for ( int i = 0 ; i < props . size ( ) ; ++ i ) { if ( i > 0 ) { buf . append ( "," ) ; } buf . append ( props . get ( i ) ) ; } encode ( buf . toString ( ) , out ) ; } for ( String query : cmd . getQueries ( ) ) { encode ( query , out ) ; } out . write ( 0 ) ;
|
public class GinMapBinder { /** * Returns a binding builder used to add a new entry in the map . Each key must be distinct ( and
* non - null ) . Bound providers will be evaluated each time the map is injected .
* It is an error to call this method without also calling one of the { @ code to } methods on the
* returned binding builder .
* Scoping elements independently is supported . Use the { @ code in } method to specify a binding
* scope . */
public GinLinkedBindingBuilder < V > addBinding ( K key ) { } }
|
BindingRecorder recorder = createRecorder ( ) ; if ( key instanceof String ) { recorder . bindConstant ( ) . to ( ( String ) key ) ; } else if ( key instanceof Enum < ? > ) { recorder . bindConstant ( ) . to ( ( Enum ) key ) ; } else if ( key instanceof Integer ) { recorder . bindConstant ( ) . to ( ( Integer ) key ) ; } else if ( key instanceof Long ) { recorder . bindConstant ( ) . to ( ( Long ) key ) ; } else if ( key instanceof Float ) { recorder . bindConstant ( ) . to ( ( Float ) key ) ; } else if ( key instanceof Double ) { recorder . bindConstant ( ) . to ( ( Double ) key ) ; } else if ( key instanceof Short ) { recorder . bindConstant ( ) . to ( ( Short ) key ) ; } else if ( key instanceof Boolean ) { recorder . bindConstant ( ) . to ( ( Boolean ) key ) ; } else if ( key instanceof Character ) { recorder . bindConstant ( ) . to ( ( Character ) key ) ; } else if ( key instanceof Class < ? > ) { recorder . bindConstant ( ) . to ( ( Class < ? > ) key ) ; } else { throw new IllegalArgumentException ( "Key type " + keyType + " is non-constant and can only be added using providers" ) ; } return recorder . bind ( valueType ) ;
|
public class Proxy { /** * called from generated proxies */
private < J , T extends Proxy < J > > T marshalConstructor ( final int opcode , final J implementation , final int version , final Class < T > newProxyCls , final long argsPointer ) { } }
|
try { final long wlProxy = WaylandClientCore . INSTANCE ( ) . wl_proxy_marshal_array_constructor ( this . pointer , opcode , argsPointer , InterfaceMeta . get ( newProxyCls ) . pointer . address ) ; return marshalProxy ( wlProxy , implementation , version , newProxyCls ) ; } catch ( final NoSuchMethodException | IllegalAccessException | InstantiationException | InvocationTargetException e ) { throw new RuntimeException ( "Uh oh, this is a bug!" , e ) ; }
|
public class CaseForEqBuilder { /** * Enum */
public < T extends Enum < T > > Cases < T , EnumExpression < T > > then ( T then ) { } }
|
return thenEnum ( ConstantImpl . create ( then ) ) ;
|
public class Criteria { /** * Adds and equals ( = ) criteria ,
* CUST _ ID = 10034
* attribute will NOT be translated into column name
* @ param column The column name to be used without translation
* @ param value An object representing the value of the column */
public void addColumnEqualTo ( String column , Object value ) { } }
|
// PAW
// SelectionCriteria c = ValueCriteria . buildEqualToCriteria ( column , value , getAlias ( ) ) ;
SelectionCriteria c = ValueCriteria . buildEqualToCriteria ( column , value , getUserAlias ( column ) ) ; c . setTranslateAttribute ( false ) ; addSelectionCriteria ( c ) ;
|
public class UtlJsp { /** * < p > Clip file name from full path . < / p >
* @ param pPath string
* @ return String file name */
public final String evalFileNameWithoutExtension ( final String pPath ) { } }
|
int idxStart = 0 ; int idxEnd = pPath . length ( ) ; int lastFs = pPath . lastIndexOf ( '/' ) ; if ( lastFs == - 1 ) { lastFs = pPath . lastIndexOf ( '\\' ) ; } if ( lastFs != - 1 ) { idxStart = lastFs + 1 ; } int idxDot = pPath . lastIndexOf ( '.' ) ; if ( idxDot != - 1 ) { idxEnd = idxDot ; } return pPath . substring ( idxStart , idxEnd ) ;
|
public class OWLDataMinCardinalityImpl_CustomFieldSerializer { /** * Deserializes the content of the object from the
* { @ link com . google . gwt . user . client . rpc . SerializationStreamReader } .
* @ param streamReader the { @ link com . google . gwt . user . client . rpc . SerializationStreamReader } to read the
* object ' s content from
* @ param instance the object instance to deserialize
* @ throws com . google . gwt . user . client . rpc . SerializationException
* if the deserialization operation is not
* successful */
@ Override public void deserializeInstance ( SerializationStreamReader streamReader , OWLDataMinCardinalityImpl instance ) throws SerializationException { } }
|
deserialize ( streamReader , instance ) ;
|
public class Delta { /** * Compute differential coding given an initial value . Output is written
* to a provided array : must have length " length " or better .
* @ param data
* data to be modified
* @ param start
* starting index
* @ param length
* number of integers to process
* @ param init
* initial value
* @ param out
* output array
* @ return next initial vale */
public static int delta ( int [ ] data , int start , int length , int init , int [ ] out ) { } }
|
for ( int i = length - 1 ; i > 0 ; -- i ) { out [ i ] = data [ start + i ] - data [ start + i - 1 ] ; } out [ 0 ] = data [ start ] - init ; return data [ start + length - 1 ] ;
|
public class JobServiceClient { /** * Updates specified job .
* < p > Typically , updated contents become visible in search results within 10 seconds , but it may
* take up to 5 minutes .
* < p > Sample code :
* < pre > < code >
* try ( JobServiceClient jobServiceClient = JobServiceClient . create ( ) ) {
* Job job = Job . newBuilder ( ) . build ( ) ;
* Job response = jobServiceClient . updateJob ( job ) ;
* < / code > < / pre >
* @ param job Required .
* < p > The Job to be updated .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final Job updateJob ( Job job ) { } }
|
UpdateJobRequest request = UpdateJobRequest . newBuilder ( ) . setJob ( job ) . build ( ) ; return updateJob ( request ) ;
|
public class JournalInputFile { /** * When we have processed the file , move it to the archive directory . */
public void closeAndRename ( File archiveDirectory ) throws JournalException { } }
|
try { xmlReader . close ( ) ; fileReader . close ( ) ; File archiveFile = new File ( archiveDirectory , file . getName ( ) ) ; /* * java . io . File . renameTo ( ) has a known bug when working across
* file - systems , see :
* http : / / bugs . sun . com / bugdatabase / view _ bug . do ? bug _ id = 4073756 So
* instead of this call : file . renameTo ( archiveFile ) ; We use the
* following line , and check for exception . . . */
try { FileMovingUtil . move ( file , archiveFile ) ; } catch ( IOException e ) { throw new JournalException ( "Failed to rename file from '" + file . getPath ( ) + "' to '" + archiveFile . getPath ( ) + "'" , e ) ; } } catch ( XMLStreamException e ) { throw new JournalException ( e ) ; } catch ( IOException e ) { throw new JournalException ( e ) ; }
|
public class KeySet { /** * Creates a key set that will retrieve all rows of a table or index . */
public static KeySet all ( ) { } }
|
return new KeySet ( true , ImmutableList . < Key > of ( ) , ImmutableList . < KeyRange > of ( ) ) ;
|
public class OptionGroupOption { /** * The option settings that are available ( and the default value ) for each option in an option group .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setOptionGroupOptionSettings ( java . util . Collection ) } or
* { @ link # withOptionGroupOptionSettings ( java . util . Collection ) } if you want to override the existing values .
* @ param optionGroupOptionSettings
* The option settings that are available ( and the default value ) for each option in an option group .
* @ return Returns a reference to this object so that method calls can be chained together . */
public OptionGroupOption withOptionGroupOptionSettings ( OptionGroupOptionSetting ... optionGroupOptionSettings ) { } }
|
if ( this . optionGroupOptionSettings == null ) { setOptionGroupOptionSettings ( new com . amazonaws . internal . SdkInternalList < OptionGroupOptionSetting > ( optionGroupOptionSettings . length ) ) ; } for ( OptionGroupOptionSetting ele : optionGroupOptionSettings ) { this . optionGroupOptionSettings . add ( ele ) ; } return this ;
|
public class DefaultPropertyPlaceholderConfigurer { /** * @ param placeholderWithDefault default placeholder
* @ param props properties instance
* @ param systemPropertiesMode system properties mode
* @ return the resolved placeholder */
protected final String resolvePlaceholder ( final String placeholderWithDefault , final Properties props , final int systemPropertiesMode ) { } }
|
String placeholder = getPlaceholder ( placeholderWithDefault ) ; String resolved = super . resolvePlaceholder ( placeholder , props , systemPropertiesMode ) ; if ( resolved == null ) { resolved = getDefault ( placeholderWithDefault ) ; } return resolved ;
|
public class ImageBandMath { /** * Computes the standard deviation for each pixel across all bands in the { @ link Planar }
* image .
* @ param input Planar image - not modified
* @ param output Gray scale image containing average pixel values - modified
* @ param avg Input Gray scale image containing average image . Can be null */
public static void stdDev ( Planar < GrayF32 > input , GrayF32 output , @ Nullable GrayF32 avg ) { } }
|
stdDev ( input , output , avg , 0 , input . getNumBands ( ) - 1 ) ;
|
public class IdentityOperator { /** * Returns the selected candidates unaltered .
* @ param selectedCandidates The candidates to " evolve " ( or do
* nothing to in this case ) .
* @ param rng A source of randomness ( not used ) .
* @ return The unaltered candidates . */
public List < T > apply ( List < T > selectedCandidates , Random rng ) { } }
|
return new ArrayList < T > ( selectedCandidates ) ;
|
public class DeleteWorkGroupRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param deleteWorkGroupRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller that writes each member
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(DeleteWorkGroupRequest deleteWorkGroupRequest, ProtocolMarshaller protocolMarshaller) {
        if (deleteWorkGroupRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each request member through its protocol binding.
            protocolMarshaller.marshall(deleteWorkGroupRequest.getWorkGroup(), WORKGROUP_BINDING);
            protocolMarshaller.marshall(deleteWorkGroupRequest.getRecursiveDeleteOption(), RECURSIVEDELETEOPTION_BINDING);
        } catch (Exception e) {
            // Surface any marshalling failure as a client-side SDK error.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
|
public class ZTFilePermissionsUtil { /** * Convert Posix mode to { @ link ZTFilePermissions }
* @ param mode
* @ return */
static ZTFilePermissions fromPosixFileMode ( int mode ) { } }
|
ZTFilePermissions permissions = new ZTFilePermissions ( ) ; permissions . setOwnerCanExecute ( ( mode & OWNER_EXECUTE_FLAG ) > 0 ) ; permissions . setGroupCanExecute ( ( mode & GROUP_EXECUTE_FLAG ) > 0 ) ; permissions . setOthersCanExecute ( ( mode & OTHERS_EXECUTE_FLAG ) > 0 ) ; permissions . setOwnerCanWrite ( ( mode & OWNER_WRITE_FLAG ) > 0 ) ; permissions . setGroupCanWrite ( ( mode & GROUP_WRITE_FLAG ) > 0 ) ; permissions . setOthersCanWrite ( ( mode & OTHERS_WRITE_FLAG ) > 0 ) ; permissions . setOwnerCanRead ( ( mode & OWNER_READ_FLAG ) > 0 ) ; permissions . setGroupCanRead ( ( mode & GROUP_READ_FLAG ) > 0 ) ; permissions . setOthersCanRead ( ( mode & OTHERS_READ_FLAG ) > 0 ) ; return permissions ;
|
public class SpatialAnchorsAccountsInner {
    /**
     * Regenerate 1 Key of a Spatial Anchors Account.
     *
     * @param resourceGroupName Name of an Azure resource group.
     * @param spatialAnchorsAccountName Name of a Mixed Reality Spatial Anchors Account.
     * @param serial serial of key to be regenerated
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the SpatialAnchorsAccountKeysInner object if successful.
     */
    public SpatialAnchorsAccountKeysInner regenerateKeys(String resourceGroupName, String spatialAnchorsAccountName, Integer serial) {
        // Synchronous variant: block on the async call and unwrap the response body.
        return regenerateKeysWithServiceResponseAsync(resourceGroupName, spatialAnchorsAccountName, serial).toBlocking().single().body();
    }
}
|
public class MeterValue { /** * Return a new { @ link MeterValue } instance for the given String value . The value may
* contain a simple number , or a { @ link Duration duration string } .
* @ param value the source value
* @ return a { @ link MeterValue } instance */
public static MeterValue valueOf ( String value ) { } }
|
if ( isNumber ( value ) ) { return new MeterValue ( Long . parseLong ( value ) ) ; } return new MeterValue ( durationConverter . convert ( value ) ) ;
|
public class SegmentConstant { /** * This method parses the given { @ code bytes } from { @ code source } to { @ code byte [ ] } .
* @ param bytes are the bytes encoded as { @ link String } in the form { @ code HH ( [ - ] HH ) * } where { @ code HH } is a hex
* encoded byte ( [ 0-9A - F ] { 2 } ) . Example : " FE - 7A - 32 - BB " .
* @ param source describes where the given { @ code bytes } origin from . This is only used in case of an error for the
* exception message .
* @ return the converted byte array . */
static byte [ ] parseBytes ( String bytes , String source ) { } }
|
NlsNullPointerException . checkNotNull ( source , bytes ) ; int length = bytes . length ( ) ; if ( ( length == 0 ) || ( length % 3 != 0 ) ) { throw new NlsIllegalArgumentException ( bytes , source ) ; } length = length / 3 ; byte [ ] result = new byte [ length ] ; int start = 0 ; for ( int i = 0 ; i < length ; i ++ ) { String byteString = bytes . substring ( start , start + 1 ) ; result [ i ] = ( byte ) Integer . parseInt ( byteString , 16 ) ; if ( bytes . charAt ( start + 2 ) != '-' ) { throw new NlsIllegalArgumentException ( bytes , source ) ; } start = start + 3 ; } return result ;
|
public class SimpleAuthenticator { /** * Unregisters a PasswordAuthentication with a given URL address . */
public PasswordAuthentication unregisterPasswordAuthentication ( URL pURL ) { } }
|
return unregisterPasswordAuthentication ( NetUtil . createInetAddressFromURL ( pURL ) , pURL . getPort ( ) , pURL . getProtocol ( ) , null , BASIC ) ;
|
public class ReportingUtils {
    /**
     * Handles Hadoop 'old' and 'new' API reporting classes, namely
     * {@link Reporter} and {@link TaskInputOutputContext}.
     */
    @SuppressWarnings({ "rawtypes" })
    static void report(Progressable progressable, Stats stats) {
        // Unwrap any compatibility wrapper before inspecting the concrete type.
        progressable = (Progressable) CompatHandler.unwrap(progressable);
        if (progressable == null || progressable == Reporter.NULL) {
            return;
        }
        // Old ('mapred') API: publish every counter through the Reporter.
        if (progressable instanceof Reporter) {
            Reporter reporter = (Reporter) progressable;
            for (Counter count : Counter.ALL) {
                oldApiCounter(reporter, count, count.get(stats));
            }
        }
        // New ('mapreduce') API: publish through the compat task context.
        if (progressable instanceof org.apache.hadoop.mapreduce.TaskInputOutputContext) {
            TaskInputOutputContext compatTioc = CompatHandler.taskInputOutputContext((org.apache.hadoop.mapreduce.TaskInputOutputContext) progressable);
            for (Counter count : Counter.ALL) {
                newApiCounter(compatTioc, count, count.get(stats));
            }
        }
    }
}
|
public class CollectorConfiguration { /** * This method merges the supplied configuration into this configuration . If
* a conflict is found , if overwrite is true then the supplied config element
* will be used , otherwise an exception will be raised .
* @ param config The configuration to merge
* @ param overwrite Whether to overwrite when conflict found
* @ throws IllegalArgumentException Failed to merge due to a conflict */
public void merge ( CollectorConfiguration config , boolean overwrite ) throws IllegalArgumentException { } }
|
for ( String key : config . getInstrumentation ( ) . keySet ( ) ) { if ( getInstrumentation ( ) . containsKey ( key ) && ! overwrite ) { throw new IllegalArgumentException ( "Instrumentation for '" + key + "' already exists" ) ; } getInstrumentation ( ) . put ( key , config . getInstrumentation ( ) . get ( key ) ) ; } for ( String key : config . getTransactions ( ) . keySet ( ) ) { if ( getTransactions ( ) . containsKey ( key ) && ! overwrite ) { throw new IllegalArgumentException ( "Transaction config for '" + key + "' already exists" ) ; } getTransactions ( ) . put ( key , config . getTransactions ( ) . get ( key ) ) ; } getProperties ( ) . putAll ( config . getProperties ( ) ) ;
|
public class ZipUtil { /** * This method check if the input argument is a zip directory containing files .
* @ param key
* @ return true if bv is a zip directory containing files , false otherwise . */
static boolean isZipDirectory ( Key key ) { } }
|
Iced ice = DKV . getGet ( key ) ; if ( ice == null ) throw new H2OIllegalArgumentException ( "Missing data" , "Did not find any data under " + "key " + key ) ; ByteVec bv = ( ByteVec ) ( ice instanceof ByteVec ? ice : ( ( Frame ) ice ) . vecs ( ) [ 0 ] ) ; return isZipDirectory ( bv ) ;
|
public class BusCommandContextFactory {
    /**
     * Creates a new {@link BusCommandContext} with the given {@code endpoint}.
     *
     * @param endpoint the queue or topic the present request came from
     * @return a new {@link BusCommandContext}
     */
    public BusCommandContext newCommandContext(Endpoint endpoint) {
        // Bundle the endpoint with the shared connection factory and the two
        // websocket session registries (UI clients and feeds).
        return new BusCommandContext(endpoint, connectionFactoryProvider.getConnectionFactory(), wsEndpoints.getUiClientSessions(), wsEndpoints.getFeedSessions());
    }
}
|
public class SymSpell { /** * / < returns > True if file loaded , or false if file not found . < / returns > */
public boolean loadDictionary ( String corpus , int termIndex , int countIndex ) { } }
|
File file = new File ( corpus ) ; if ( ! file . exists ( ) ) { return false ; } BufferedReader br = null ; try { br = Files . newBufferedReader ( Paths . get ( corpus ) , StandardCharsets . UTF_8 ) ; } catch ( IOException ex ) { System . out . println ( ex . getMessage ( ) ) ; } if ( br == null ) { return false ; } return loadDictionary ( br , termIndex , countIndex ) ;
|
public class Loader { /** * Returns the class entry .
* @ param name name of the class */
protected ClassEntry getClassEntry ( String name , String pathName ) throws ClassNotFoundException { } }
|
// Find the path corresponding to the class
Path path = getPath ( pathName ) ; if ( path != null && path . length ( ) > 0 ) { return new ClassEntry ( _loader , name , path , path , getCodeSource ( path ) ) ; } else return null ;
|
public class InternalXbaseWithAnnotationsParser {
    /** $ANTLR start synpred33_InternalXbaseWithAnnotations */
    // ANTLR-generated syntactic predicate: matches ((ruleJvmFormalParameter)) ':'.
    // Do not edit by hand; regenerate from the grammar instead.
    public final void synpred33_InternalXbaseWithAnnotations_fragment() throws RecognitionException {
        // InternalXbaseWithAnnotations.g:3550:6: ( ( ( ( ruleJvmFormalParameter ) ) ':' ) )
        // InternalXbaseWithAnnotations.g:3550:7: ( ( ( ruleJvmFormalParameter ) ) ':' )
        {
            // InternalXbaseWithAnnotations.g:3551:7: ( ( ruleJvmFormalParameter ) ) ':'
            {
                // InternalXbaseWithAnnotations.g:3552:8: ( ruleJvmFormalParameter )
                {
                    // InternalXbaseWithAnnotations.g:3553:9: ruleJvmFormalParameter
                    {
                        pushFollow(FOLLOW_51);
                        ruleJvmFormalParameter();
                        state._fsp--;
                        if (state.failed) return;
                    }
                }
                // token 62 is ':' in this grammar's token table
                match(input, 62, FOLLOW_2);
                if (state.failed) return;
            }
        }
    }
}
|
public class ExtensionConfig { /** * Convert a list of { @ link ExtensionConfig } to a header value
* @ param configs the list of extension configs
* @ return the header value ( null if no configs present ) */
public static String toHeaderValue ( List < ExtensionConfig > configs ) { } }
|
if ( ( configs == null ) || ( configs . isEmpty ( ) ) ) { return null ; } StringBuilder parameters = new StringBuilder ( ) ; boolean needsDelim = false ; for ( ExtensionConfig ext : configs ) { if ( needsDelim ) { parameters . append ( ", " ) ; } parameters . append ( ext . getParameterizedName ( ) ) ; needsDelim = true ; } return parameters . toString ( ) ;
|
public class ParameterMapEntryMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param parameterMapEntry the entry to marshall; must not be null
     * @param protocolMarshaller the marshaller that writes each member
     * @throws SdkClientException if the entry is null or marshalling fails
     */
    public void marshall(ParameterMapEntry parameterMapEntry, ProtocolMarshaller protocolMarshaller) {
        if (parameterMapEntry == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit the key and the values list through their protocol bindings.
            protocolMarshaller.marshall(parameterMapEntry.getKey(), KEY_BINDING);
            protocolMarshaller.marshall(parameterMapEntry.getValues(), VALUES_BINDING);
        } catch (Exception e) {
            // Surface any marshalling failure as a client-side SDK error.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
|
public class DatabaseProvider {
    /**
     * Use an externally configured DataSource, Flavor, and optionally a shutdown hook.
     * The shutdown hook may be null if you don't want calls to Builder.close() to attempt
     * any shutdown. The DataSource and Flavor are mandatory.
     */
    @CheckReturnValue
    public static Builder fromPool(Pool pool) {
        // The connection supplier defers to the pool's DataSource; any failure
        // is wrapped so callers only ever see DatabaseException.
        return new BuilderImpl(pool.poolShutdown, () -> {
            try {
                return pool.dataSource.getConnection();
            } catch (Exception e) {
                throw new DatabaseException("Unable to obtain a connection from the DataSource", e);
            }
        }, new OptionsDefault(pool.flavor));
    }
}
|
public class BruteForceDiscordImplementation {
    /**
     * Finds the best discord. BRUTE FORCE algorithm.
     *
     * @param series the data.
     * @param windowSize the SAX sliding window size.
     * @param globalRegistry the visit registry to use.
     * @param nThreshold the z-Normalization threshold value.
     * @return the best discord with respect to registry.
     * @throws Exception if error occurs.
     */
    private static DiscordRecord findBestDiscordBruteForce(double[] series, Integer windowSize, VisitRegistry globalRegistry, double nThreshold) throws Exception {
        Date start = new Date();
        long distanceCallsCounter = 0;
        double bestSoFarDistance = -1.0;
        int bestSoFarPosition = -1;
        // Work on a copy so the caller's registry is not consumed by this scan.
        VisitRegistry outerRegistry = globalRegistry.clone();
        int outerIdx = -1;
        while (-1 != (outerIdx = outerRegistry.getNextRandomUnvisitedPosition())) { // outer loop
            outerRegistry.markVisited(outerIdx);
            // imo this is a useless piece
            // check the global visits registry
            // if (globalRegistry.isVisited(outerIdx)) {
            // continue;
            // Candidate subsequence, z-normalized.
            double[] candidateSeq = tsProcessor.znorm(tsProcessor.subseriesByCopy(series, outerIdx, outerIdx + windowSize), nThreshold);
            double nearestNeighborDistance = Double.MAX_VALUE;
            VisitRegistry innerRegistry = new VisitRegistry(series.length - windowSize);
            int innerIdx;
            while (-1 != (innerIdx = innerRegistry.getNextRandomUnvisitedPosition())) { // inner loop
                innerRegistry.markVisited(innerIdx);
                if (Math.abs(outerIdx - innerIdx) > windowSize) { // > means they shall not overlap even
                    // over a single point
                    double[] currentSubsequence = tsProcessor.znorm(tsProcessor.subseriesByCopy(series, innerIdx, innerIdx + windowSize), nThreshold);
                    // Early-abandoned distance: returns NaN once it exceeds the bound.
                    double dist = ed.earlyAbandonedDistance(candidateSeq, currentSubsequence, nearestNeighborDistance);
                    distanceCallsCounter++;
                    if ((!Double.isNaN(dist)) && dist < nearestNeighborDistance) {
                        nearestNeighborDistance = dist;
                    }
                }
            }
            // The discord is the candidate whose nearest neighbor is farthest away.
            if (!(Double.isInfinite(nearestNeighborDistance)) && nearestNeighborDistance > bestSoFarDistance) {
                bestSoFarDistance = nearestNeighborDistance;
                bestSoFarPosition = outerIdx;
                LOGGER.trace("discord updated: pos {}, dist {}", bestSoFarPosition, bestSoFarDistance);
            }
        }
        Date firstDiscord = new Date();
        LOGGER.debug("best discord found at {}, best distance: {}, in {} distance calls: {}", bestSoFarPosition, bestSoFarDistance, SAXProcessor.timeToString(start.getTime(), firstDiscord.getTime()), distanceCallsCounter);
        DiscordRecord res = new DiscordRecord(bestSoFarPosition, bestSoFarDistance);
        res.setLength(windowSize);
        res.setInfo("distance calls: " + distanceCallsCounter);
        return res;
    }
}
|
public class PropertyImpl {
    /**
     * {@inheritDoc}
     */
    public void setValue(InputStream stream) throws ValueFormatException, VersionException, LockException, ConstraintViolationException, RepositoryException {
        // Wrap the stream in a Value and delegate to the Value-based setter.
        setValue(valueFactory.createValue(stream));
    }
}
|
public class ScreenAwtAbstract { /** * Add a keyboard device . */
private void addDeviceKeyboard ( ) { } }
|
final KeyboardAwt keyboard = new KeyboardAwt ( ) ; addKeyboardListener ( keyboard ) ; devices . put ( Keyboard . class , keyboard ) ; devices . put ( InputDeviceDirectional . class , keyboard ) ;
|
public class MessageProcessRunnerTask { /** * Run this process .
* @ param job The process to run .
* @ param properties The properties to pass to this process . */
public void runProcess ( BaseProcess job , Map < String , Object > properties ) { } }
|
if ( properties == null ) properties = new HashMap < String , Object > ( ) ; Map < String , Object > propMessage = m_message . getMessageHeader ( ) . getProperties ( ) ; if ( propMessage != null ) properties . putAll ( propMessage ) ; properties . put ( DBParams . MESSAGE , m_message ) ; job . init ( this , null , properties ) ; job . run ( ) ; job . free ( ) ;
|
public class ServiceApiWrapper {
    /**
     * Upload content data.
     *
     * @param token Comapi access token.
     * @param folder Folder name to put the file in.
     * @param name Optional name for the uploaded content (may be null).
     * @param data Content data.
     * @return Observable emitting details of uploaded content.
     */
    Observable<ComapiResult<UploadContentResponse>> doUploadContent(@NonNull final String token, @NonNull final String folder, @Nullable final String name, @NonNull final ContentData data) {
        // Call the service with the prefixed token (addAuthPrefix presumably
        // prepends the auth scheme — confirm in AuthManager), map the raw
        // response to a ComapiResult, and wrap for logging/error handling.
        return wrapObservable(service.uploadContent(AuthManager.addAuthPrefix(token), apiSpaceId, folder, name, data.getBody()).map(mapToComapiResult()), log, "Uploading content " + name);
    }
}
|
public class CheckInstrumentableClassAdapter { /** * Indicate whether or not the target method is instrumentable .
* @ param access
* the method property flags
* @ param methodName
* the name of the method
* @ return true if the method is not synthetic , is not native , and is
* not named toString or hashCode . */
public boolean isInstrumentableMethod ( int access , String methodName , String descriptor ) { } }
|
if ( ( access & Opcodes . ACC_SYNTHETIC ) != 0 ) { return false ; } if ( ( access & Opcodes . ACC_NATIVE ) != 0 ) { return false ; } if ( ( access & Opcodes . ACC_ABSTRACT ) != 0 ) { return false ; } if ( methodName . equals ( "toString" ) && descriptor . equals ( "()Ljava/lang/String;" ) ) { return false ; } if ( methodName . equals ( "hashCode" ) && descriptor . equals ( "()I" ) ) { return false ; } return true ;
|
public class XUnaryOperationImpl {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        // EMF reflective check: the operand feature counts as "set" when non-null;
        // all other features are delegated to the superclass.
        switch (featureID) {
            case XbasePackage.XUNARY_OPERATION__OPERAND:
                return operand != null;
        }
        return super.eIsSet(featureID);
    }
}
|
public class IpHelper { /** * Determines if a specified host or IP refers to the local machine
* @ param addr
* String The host / IP
* @ return boolean True if the input points to the local machine , otherwise false */
public static boolean isLocalAddress ( String addr ) { } }
|
try { InetAddress iAddr = InetAddress . getByName ( addr ) ; return isLocalAddress ( iAddr ) ; } catch ( UnknownHostException e ) { return false ; }
|
public class AndroidMobileCommandHelper { /** * This method forms a { @ link Map } of parameters to
* Retrieve visibility and bounds information of the status and navigation bars .
* @ return a key - value pair . The key is the command name . The value is a { @ link Map } command arguments . */
public static Map . Entry < String , Map < String , ? > > getSystemBarsCommand ( ) { } }
|
return new AbstractMap . SimpleEntry < > ( GET_SYSTEM_BARS , ImmutableMap . of ( ) ) ;
|
public class PatternsImpl {
    /**
     * Deletes the patterns with the specified IDs.
     *
     * @param appId The application ID.
     * @param versionId The version ID.
     * @param patternIds The patterns IDs.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the OperationStatus object
     */
    public Observable<ServiceResponse<OperationStatus>> deletePatternsWithServiceResponseAsync(UUID appId, String versionId, List<UUID> patternIds) {
        // Fail fast on missing required parameters before issuing the request.
        if (this.client.endpoint() == null) {
            throw new IllegalArgumentException("Parameter this.client.endpoint() is required and cannot be null.");
        }
        if (appId == null) {
            throw new IllegalArgumentException("Parameter appId is required and cannot be null.");
        }
        if (versionId == null) {
            throw new IllegalArgumentException("Parameter versionId is required and cannot be null.");
        }
        if (patternIds == null) {
            throw new IllegalArgumentException("Parameter patternIds is required and cannot be null.");
        }
        Validator.validate(patternIds);
        // Substitute the {Endpoint} host template (AutoRest parameterized host).
        String parameterizedHost = Joiner.on(", ").join("{Endpoint}", this.client.endpoint());
        // Issue the call and map the raw response through the typed delegate.
        return service.deletePatterns(appId, versionId, patternIds, this.client.acceptLanguage(), parameterizedHost, this.client.userAgent()).flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<OperationStatus>>>() {
            @Override
            public Observable<ServiceResponse<OperationStatus>> call(Response<ResponseBody> response) {
                try {
                    ServiceResponse<OperationStatus> clientResponse = deletePatternsDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
    }
}
|
public class ClassFile { /** * Returns the Procyon field definition for a specified variable ,
* or null if not found . */
public FieldDeclaration getFieldNode ( String name , String signature ) { } }
|
for ( EntityDeclaration node : type . getMembers ( ) ) { if ( node . getEntityType ( ) == EntityType . FIELD ) { FieldDeclaration field = ( FieldDeclaration ) node ; if ( field . getName ( ) . equals ( name ) && signature ( field . getReturnType ( ) ) . equals ( signature ) ) { return field ; } } } return null ;
|
public class CmsDriverManager {
    /**
     * Adds a new relation to the given resource.<p>
     *
     * @param dbc the database context
     * @param resource the resource to add the relation to
     * @param target the target of the relation
     * @param type the type of the relation
     * @param importCase if importing relations
     * @throws CmsException if something goes wrong
     */
    public void addRelationToResource(CmsDbContext dbc, CmsResource resource, CmsResource target, CmsRelationType type, boolean importCase) throws CmsException {
        // Relations defined in content must be edited through the content itself.
        if (type.isDefinedInContent()) {
            throw new CmsIllegalArgumentException(Messages.get().container(Messages.ERR_ADD_RELATION_IN_CONTENT_3, dbc.removeSiteRoot(resource.getRootPath()), dbc.removeSiteRoot(target.getRootPath()), type.getLocalizedName(dbc.getRequestContext().getLocale())));
        }
        CmsRelation relation = new CmsRelation(resource, target, type);
        getVfsDriver(dbc).createRelation(dbc, dbc.currentProject().getUuid(), relation);
        // Logging and date bumping are skipped during imports.
        if (!importCase) {
            // log it
            log(dbc, new CmsLogEntry(dbc, resource.getStructureId(), CmsLogEntryType.RESOURCE_ADD_RELATION, new String[] { relation.getSourcePath(), relation.getTargetPath() }), false);
            // touch the resource
            setDateLastModified(dbc, resource, System.currentTimeMillis());
        }
    }
}
|
public class DescribeRemediationExecutionStatusRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param describeRemediationExecutionStatusRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller that writes each member
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(DescribeRemediationExecutionStatusRequest describeRemediationExecutionStatusRequest, ProtocolMarshaller protocolMarshaller) {
        if (describeRemediationExecutionStatusRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each request member through its protocol binding.
            protocolMarshaller.marshall(describeRemediationExecutionStatusRequest.getConfigRuleName(), CONFIGRULENAME_BINDING);
            protocolMarshaller.marshall(describeRemediationExecutionStatusRequest.getResourceKeys(), RESOURCEKEYS_BINDING);
            protocolMarshaller.marshall(describeRemediationExecutionStatusRequest.getLimit(), LIMIT_BINDING);
            protocolMarshaller.marshall(describeRemediationExecutionStatusRequest.getNextToken(), NEXTTOKEN_BINDING);
        } catch (Exception e) {
            // Surface any marshalling failure as a client-side SDK error.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
|
public class ElevationService { /** * Processess the Javascript response and generates the required objects
* that are then passed back to the original callback .
* @ param results
* @ param status */
public void processResponse ( Object results , Object status ) { } }
|
ElevationStatus pStatus = ElevationStatus . UNKNOWN_ERROR ; if ( status instanceof String && results instanceof JSObject ) { pStatus = ElevationStatus . valueOf ( ( String ) status ) ; if ( ElevationStatus . OK . equals ( pStatus ) ) { JSObject jsres = ( JSObject ) results ; Object len = jsres . getMember ( "length" ) ; if ( len instanceof Number ) { int n = ( ( Number ) len ) . intValue ( ) ; ElevationResult [ ] ers = new ElevationResult [ n ] ; for ( int i = 0 ; i < n ; i ++ ) { Object obj = jsres . getSlot ( i ) ; if ( obj instanceof JSObject ) { ers [ i ] = new ElevationResult ( ( JSObject ) obj ) ; } } callback . elevationsReceived ( ers , pStatus ) ; return ; } } } callback . elevationsReceived ( new ElevationResult [ ] { } , pStatus ) ;
|
public class StreamProjection {
    /**
     * Projects a {@link Tuple} {@link DataStream} to the previously selected fields.
     *
     * @return The projected DataStream.
     * @see Tuple
     * @see DataStream
     */
    public <T0, T1> SingleOutputStreamOperator<Tuple2<T0, T1>> projectTuple2() {
        // Derive the output field types from the selected indexes of the input type.
        TypeInformation<?>[] fTypes = extractFieldTypes(fieldIndexes, dataStream.getType());
        TupleTypeInfo<Tuple2<T0, T1>> tType = new TupleTypeInfo<Tuple2<T0, T1>>(fTypes);
        // Apply the projection operator with a serializer for the output tuple type.
        return dataStream.transform("Projection", tType, new StreamProject<IN, Tuple2<T0, T1>>(fieldIndexes, tType.createSerializer(dataStream.getExecutionConfig())));
    }
}
|
public class ClassProcessorTask {
    /* package private */
    /**
     * Records on the class an invisible 'processed' annotation listing the given
     * processor: reuses an existing annotation when present, otherwise creates
     * a new annotations attribute and annotation, then appends the processor's
     * class name to the annotation's value array.
     */
    void markAsProcessed(final CtClass ctClass, final ClassProcessor processor) {
        final ClassFile classFile = ctClass.getClassFile();
        AnnotationsAttribute annotationAttribute = null;
        Annotation annotation = null;
        // Scan all attributes for an existing processed-annotation.
        for (final Object attributeObject : classFile.getAttributes()) {
            if (attributeObject instanceof AnnotationsAttribute) {
                annotationAttribute = (AnnotationsAttribute) attributeObject;
                annotation = annotationAttribute.getAnnotation(PROCESSED_ANNOTATION_CLASS);
                if (annotation != null) {
                    break;
                }
            }
        }
        if (annotation == null) {
            // Not found: create an invisible annotations attribute plus annotation.
            _log.debug("Creating annotation attribute on: " + ctClass.getName());
            annotationAttribute = new AnnotationsAttribute(classFile.getConstPool(), AnnotationsAttribute.invisibleTag);
            classFile.addAttribute(annotationAttribute);
            _log.debug("Creating processed annotation on: " + ctClass.getName());
            annotation = new Annotation(PROCESSED_ANNOTATION_CLASS, classFile.getConstPool());
        }
        MemberValue value = annotation.getMemberValue("value");
        if (value == null) {
            value = new ArrayMemberValue(classFile.getConstPool());
            annotation.addMemberValue("value", value);
        }
        final ArrayMemberValue valueArray = (ArrayMemberValue) value;
        // Grow the value array by one slot and append this processor's class name.
        final MemberValue[] existingProcessorValues = valueArray.getValue();
        final MemberValue[] newProcessorValues;
        if (existingProcessorValues != null) {
            newProcessorValues = Arrays.copyOf(valueArray.getValue(), valueArray.getValue().length + 1);
        } else {
            newProcessorValues = new MemberValue[1];
        }
        newProcessorValues[newProcessorValues.length - 1] = new StringMemberValue(processor.getClass().getName(), classFile.getConstPool());
        valueArray.setValue(newProcessorValues);
        _log.debug("New processed annotation value on: " + ctClass.getName() + " = " + valueArray);
        // Re-add so the updated annotation is written back to the attribute.
        annotationAttribute.addAnnotation(annotation);
    }
}
|
public class HashUtils { /** * Hashes a string using the SHA - 256 algorithm .
* @ since 1.1
* @ param data the string to hash
* @ param charset the charset of the string
* @ return the SHA - 256 hash of the string
* @ throws NoSuchAlgorithmException the algorithm is not supported by existing providers */
public static byte [ ] sha256Hash ( String data , Charset charset ) throws NoSuchAlgorithmException { } }
|
return sha256Hash ( data . getBytes ( charset ) ) ;
|
public class MergeMojo {
    /**
     * Opens an OutputStream, based on the supplied file.
     *
     * @param file target {@linkplain File}
     * @return {@linkplain OutputStream}
     * @throws MojoExecutionException when the file cannot be (re)created or opened
     */
    protected OutputStream initOutput(final File file) throws MojoExecutionException {
        // stream to return
        final OutputStream stream;
        // plenty of things can go wrong...
        try {
            // directory?
            if (file.isDirectory()) {
                throw new MojoExecutionException("File " + file.getAbsolutePath() + " is directory!");
            }
            // already exists && can't remove it?
            if (file.exists() && !file.delete()) {
                throw new MojoExecutionException("Could not remove file: " + file.getAbsolutePath());
            }
            // get directory above file
            final File fileDirectory = file.getParentFile();
            // does not exist && can't create it?
            if (!fileDirectory.exists() && !fileDirectory.mkdirs()) {
                throw new MojoExecutionException("Could not create directory: " + fileDirectory.getAbsolutePath());
            }
            // parent directory is no directory?
            if (!fileDirectory.isDirectory()) {
                throw new MojoExecutionException("Not a directory: " + fileDirectory.getAbsolutePath());
            }
            // file is for any reason not creatable?
            if (!file.createNewFile()) {
                throw new MojoExecutionException("Could not create file: " + file.getAbsolutePath());
            }
            // finally open the stream on the freshly created file
            stream = new FileOutputStream(file);
        } catch (FileNotFoundException e) {
            throw new MojoExecutionException("Could not find file: " + file.getAbsolutePath(), e);
        } catch (IOException e) {
            throw new MojoExecutionException("Could not write to file: " + file.getAbsolutePath(), e);
        }
        // return
        return stream;
    }
}
|
public class MapfishMapContext { /** * Return the map bounds rotated with the set rotation . The bounds are adapted to rounding changes of the
* size of the set paint area .
* @ return Rotated bounds . */
public MapBounds getRotatedBoundsAdjustedForPreciseRotatedMapSize ( ) { } }
|
Rectangle2D . Double paintAreaPrecise = getRotatedMapSizePrecise ( ) ; Rectangle paintArea = new Rectangle ( MapfishMapContext . rectangleDoubleToDimension ( paintAreaPrecise ) ) ; return getRotatedBounds ( paintAreaPrecise , paintArea ) ;
|
public class DateTieredCompactionStrategy { /** * Group files with similar min timestamp into buckets . Files with recent min timestamps are grouped together into
* buckets designated to short timespans while files with older timestamps are grouped into buckets representing
* longer timespans .
* @ param files pairs consisting of a file and its min timestamp
* @ param timeUnit
* @ param base
* @ param now
* @ return a list of buckets of files . The list is ordered such that the files with newest timestamps come first .
* Each bucket is also a list of files ordered from newest to oldest . */
@ VisibleForTesting static < T > List < List < T > > getBuckets ( Collection < Pair < T , Long > > files , long timeUnit , int base , long now ) { } }
|
// Sort files by age . Newest first .
final List < Pair < T , Long > > sortedFiles = Lists . newArrayList ( files ) ; Collections . sort ( sortedFiles , Collections . reverseOrder ( new Comparator < Pair < T , Long > > ( ) { public int compare ( Pair < T , Long > p1 , Pair < T , Long > p2 ) { return p1 . right . compareTo ( p2 . right ) ; } } ) ) ; List < List < T > > buckets = Lists . newArrayList ( ) ; Target target = getInitialTarget ( now , timeUnit ) ; PeekingIterator < Pair < T , Long > > it = Iterators . peekingIterator ( sortedFiles . iterator ( ) ) ; outerLoop : while ( it . hasNext ( ) ) { while ( ! target . onTarget ( it . peek ( ) . right ) ) { // If the file is too new for the target , skip it .
if ( target . compareToTimestamp ( it . peek ( ) . right ) < 0 ) { it . next ( ) ; if ( ! it . hasNext ( ) ) break outerLoop ; } else // If the file is too old for the target , switch targets .
target = target . nextTarget ( base ) ; } List < T > bucket = Lists . newArrayList ( ) ; while ( target . onTarget ( it . peek ( ) . right ) ) { bucket . add ( it . next ( ) . left ) ; if ( ! it . hasNext ( ) ) break ; } buckets . add ( bucket ) ; } return buckets ;
|
public class CmsEmbeddedDialogHandler { /** * Called when site and or project have been changed . < p >
* @ param sitePath the site path to the resource to display
* @ param serverLink the server link to the resource to display */
public void onSiteOrProjectChange ( String sitePath , String serverLink ) { } }
|
if ( m_frame != null ) { m_frame . removeFromParent ( ) ; m_frame = null ; } if ( m_handler != null ) { m_handler . onSiteOrProjectChange ( sitePath , serverLink ) ; } else { Window . Location . assign ( serverLink ) ; }
|
public class TimestampTracker { /** * Remove a timestamp ( of completed transaction ) */
// Removes the timestamp of a completed transaction from the tracked set and
// decrements the in-progress allocation counter. Throws NoSuchElementException
// when the timestamp is unknown (already removed or never added).
public synchronized void removeTimestamp ( long ts ) throws NoSuchElementException { } }
|
// Guards: the tracker must still be open and at least one allocation must be in flight.
// NOTE(review): `Objects.requireNonNull(node)` references a name `node` that is not
// declared anywhere in this method's visible scope -- presumably leftover from a merge
// or a different overload; verify against the full class before relying on this code.
Preconditions . checkState ( ! closed , "tracker closed " ) ; Preconditions . checkState ( allocationsInProgress > 0 , "allocationsInProgress should be > 0 " + allocationsInProgress ) ; Objects . requireNonNull ( node ) ; if ( timestamps . remove ( ts ) == false ) { throw new NoSuchElementException ( "Timestamp " + ts + " was previously removed or does not exist" ) ; } allocationsInProgress -- ;
|
public class GDiscreteFourierTransformOps { /** * Performs element - wise complex multiplication between two complex images .
* @ param complexA ( Input ) Complex image
* @ param complexB ( Input ) Complex image
* @ param complexC ( Output ) Complex image */
public static void multiplyComplex ( ImageInterleaved complexA , ImageInterleaved complexB , ImageInterleaved complexC ) { } }
|
if ( complexB instanceof InterleavedF32 ) { DiscreteFourierTransformOps . multiplyComplex ( ( InterleavedF32 ) complexA , ( InterleavedF32 ) complexB , ( InterleavedF32 ) complexC ) ; } else if ( complexB instanceof InterleavedF64 ) { DiscreteFourierTransformOps . multiplyComplex ( ( InterleavedF64 ) complexA , ( InterleavedF64 ) complexB , ( InterleavedF64 ) complexC ) ; } else { throw new IllegalArgumentException ( "Unknown image type" ) ; }
|
public class ElementDiff { /** * Compare two { @ link IChemObject } classes and return the difference as a { @ link String } .
* @ param first the first of the two classes to compare
* @ param second the second of the two classes to compare
* @ return a { @ link String } representation of the difference between the first and second { @ link IChemObject } . */
public static String diff ( IChemObject first , IChemObject second ) { } }
|
IDifference difference = difference ( first , second ) ; if ( difference == null ) { return "" ; } else { return difference . toString ( ) ; }
|
public class CmsJobEditView { /** * Binds the given component to the given bean property . < p >
* @ param field the component
* @ param property the bean property */
void bindField ( AbstractField < ? > field , String property ) { } }
|
m_group . bind ( field , property ) ; field . setCaption ( CmsVaadinUtils . getMessageText ( "label." + property ) ) ; field . setDescription ( CmsVaadinUtils . getMessageText ( "label." + property + ".help" ) ) ;
|
public class AWSApplicationDiscoveryClient { /** * Creates one or more tags for configuration items . Tags are metadata that help you categorize IT assets . This API
* accepts a list of multiple configuration items .
* @ param createTagsRequest
* @ return Result of the CreateTags operation returned by the service .
* @ throws AuthorizationErrorException
* The AWS user account does not have permission to perform the action . Check the IAM policy associated with
* this account .
* @ throws ResourceNotFoundException
* The specified configuration ID was not located . Verify the configuration ID and try again .
* @ throws InvalidParameterException
* One or more parameters are not valid . Verify the parameters and try again .
* @ throws InvalidParameterValueException
* The value of one or more parameters are either invalid or out of range . Verify the parameter values and
* try again .
* @ throws ServerInternalErrorException
* The server experienced an internal error . Try again .
* @ sample AWSApplicationDiscovery . CreateTags */
@ Override public CreateTagsResult createTags ( CreateTagsRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeCreateTags ( request ) ;
|
public class IntHashMap { /** * Return an array with values in this map
* @ return array with values */
public V [ ] getValues ( ) { } }
|
final V [ ] array = factory . newArray ( elementCount ) ; int i = 0 ; for ( final V v : this ) { array [ i ++ ] = v ; } return array ;
|
public class AbstractProxyFactory { /** * Returns the invocation handler object of the given proxy object .
* @ param obj The object
* @ return The invocation handler if the object is an OJB proxy , or < code > null < / code >
* otherwise */
public IndirectionHandler getIndirectionHandler ( Object obj ) { } }
|
if ( obj == null ) { return null ; } else if ( isNormalOjbProxy ( obj ) ) { return getDynamicIndirectionHandler ( obj ) ; } else if ( isVirtualOjbProxy ( obj ) ) { return VirtualProxy . getIndirectionHandler ( ( VirtualProxy ) obj ) ; } else { return null ; }
|
public class PackagedProgram { /** * Returns the plan as generated from the Pact Assembler .
* @ return The program ' s plan .
* @ throws ProgramInvocationException Thrown , if an error occurred in the program while
* creating the program ' s { @ link Plan } . */
private Plan getPlan ( ) throws ProgramInvocationException { } }
|
if ( this . plan == null ) { Thread . currentThread ( ) . setContextClassLoader ( this . userCodeClassLoader ) ; this . plan = createPlanFromProgram ( this . program , this . args ) ; } return this . plan ;
|
public class AWSCostExplorerClient { /** * Queries for available tag keys and tag values for a specified period . You can search the tag values for an
* arbitrary string .
* @ param getTagsRequest
* @ return Result of the GetTags operation returned by the service .
* @ throws LimitExceededException
* You made too many calls in a short period of time . Try again later .
* @ throws BillExpirationException
* The requested report expired . Update the date interval and try again .
* @ throws DataUnavailableException
* The requested data is unavailable .
* @ throws InvalidNextTokenException
* The pagination token is invalid . Try again without a pagination token .
* @ throws RequestChangedException
* Your request parameters changed between pages . Try again with the old parameters or without a pagination
* token .
* @ sample AWSCostExplorer . GetTags
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ce - 2017-10-25 / GetTags " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public GetTagsResult getTags ( GetTagsRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeGetTags ( request ) ;
|
public class MtasBasicParser { /** * Prevalidate object .
* @ param object the object
* @ param currentList the current list
* @ return the boolean */
Boolean prevalidateObject ( MtasParserObject object , Map < String , List < MtasParserObject > > currentList ) { } }
|
MtasParserType objectType = object . getType ( ) ; List < MtasParserMapping < ? > > mappings = objectType . getItems ( ) ; if ( mappings . isEmpty ( ) ) { return true ; } for ( MtasParserMapping < ? > mapping : mappings ) { try { precheckMappingConditions ( object , mapping . getConditions ( ) , currentList ) ; return true ; } catch ( MtasParserException e ) { log . debug ( e ) ; } } return false ;
|
public class DerbyDatabase { /** * Adds a column to a SQL table . The method overrides the original method ,
* because Derby does not allow for < code > NOT NULL < / code > columns that no
* default value is defined . Is such column is created , the default value
* for real and integer is < code > 0 < / code > , for date time , short and long
* string a zero length string .
* @ param _ con SQL connection
* @ param _ tableName name of table to update
* @ param _ columnName column to add
* @ param _ columnType type of column to add
* @ param _ defaultValue default value of the column ( or null if not
* specified )
* @ param _ length length of column to add ( or 0 if not specified )
* @ param _ scale scale of the column to add ( or 0 if not
* specified )
* @ throws SQLException if the column could not be added to the tables
* @ return this */
@ Override // CHECKSTYLE : OFF
public DerbyDatabase addTableColumn ( final Connection _con , final String _tableName , final String _columnName , final ColumnType _columnType , final String _defaultValue , final int _length , final int _scale ) throws SQLException { } }
|
// CHECKSTYLE : ON
String defaultValue = _defaultValue ; if ( defaultValue == null ) { switch ( _columnType ) { case INTEGER : case REAL : defaultValue = "0" ; break ; case DATETIME : case STRING_LONG : case STRING_SHORT : defaultValue = "''" ; break ; default : break ; } } return super . addTableColumn ( _con , _tableName , _columnName , _columnType , defaultValue , _length , _scale ) ;
|
public class GlobalVariablesParser { /** * Parses all variable definitions and adds those to the bean definition
* builder as property value .
* @ param builder the target bean definition builder .
* @ param element the source element . */
private void parseVariableDefinitions ( BeanDefinitionBuilder builder , Element element ) { } }
|
Map < String , String > testVariables = new LinkedHashMap < String , String > ( ) ; List < Element > variableElements = DomUtils . getChildElementsByTagName ( element , "variable" ) ; for ( Element variableDefinition : variableElements ) { testVariables . put ( variableDefinition . getAttribute ( "name" ) , variableDefinition . getAttribute ( "value" ) ) ; } if ( ! testVariables . isEmpty ( ) ) { builder . addPropertyValue ( "variables" , testVariables ) ; }
|
public class DeploymentOperations { /** * Creates an undeploy operation for each deployment description .
* If the { @ link UndeployDescription # isRemoveContent ( ) } returns { @ code true } the content will also be removed from
* the content repository . Otherwise the content will remain on the server and only the { @ code undeploy } operation
* will be executed .
* Note that the { @ link UndeployDescription # isFailOnMissing ( ) failOnMissing } is ignored and the operation will fail
* if any deployments being undeployed are missing .
* @ param undeployDescriptions the set of descriptions used to crate the operation
* @ return the undeploy operation */
public static Operation createUndeployOperation ( final Set < UndeployDescription > undeployDescriptions ) { } }
|
Assertions . requiresNotNullOrNotEmptyParameter ( "undeployDescriptions" , undeployDescriptions ) ; final CompositeOperationBuilder builder = CompositeOperationBuilder . create ( true ) ; for ( UndeployDescription undeployDescription : undeployDescriptions ) { addUndeployOperationStep ( builder , undeployDescription ) ; } return builder . build ( ) ;
|
public class KeepAliveManager { /** * Transport has received some data so that we can delay sending keepalives . */
// State machine update on inbound data: restarts the keepalive stopwatch and,
// depending on the current ping state, either marks a scheduled ping as delayed
// or (after a ping was effectively acked) cancels the pending shutdown and
// schedules the next keepalive ping.
public synchronized void onDataReceived ( ) { } }
|
// Restart the idle timer -- data just arrived, so no keepalive is needed yet.
stopwatch . reset ( ) . start ( ) ; // We do not cancel the ping future here . This avoids constantly scheduling and cancellation in
// a busy transport . Instead , we update the status here and reschedule later . So we actually
// keep one sendPing task always in flight when there ' re active rpcs .
if ( state == State . PING_SCHEDULED ) { state = State . PING_DELAYED ; } else if ( state == State . PING_SENT || state == State . IDLE_AND_PING_SENT ) { // Ping acked or effectively ping acked . Cancel shutdown , and then if not idle ,
// schedule a new keep - alive ping .
if ( shutdownFuture != null ) { shutdownFuture . cancel ( false ) ; } if ( state == State . IDLE_AND_PING_SENT ) { // not to schedule new pings until onTransportActive
state = State . IDLE ; return ; } // schedule a new ping
state = State . PING_SCHEDULED ; checkState ( pingFuture == null , "There should be no outstanding pingFuture" ) ; pingFuture = scheduler . schedule ( sendPing , keepAliveTimeInNanos , TimeUnit . NANOSECONDS ) ; }
|
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link LoanRequest }
* { @ code > } .
* @ param value the value
* @ return the JAXB element < loan response > */
@ XmlElementDecl ( namespace = "urn:switchyard-quickstart-demo:library:1.0" , name = "loanResponse" ) public JAXBElement < LoanResponse > createLoanResponse ( LoanResponse value ) { } }
|
return new JAXBElement < LoanResponse > ( LOAN_RESPONSE_QNAME , LoanResponse . class , null , value ) ;
|
public class CharData { /** * # end Getters / Setters */
public void setUVs ( float u , float v , float U , float V ) { } }
|
this . u = u ; this . v = v ; this . U = U ; this . V = V ;
|
public class Matrix4x3d { /** * Set this matrix to be a simple translation matrix .
* The resulting matrix can be multiplied against another transformation
* matrix to obtain an additional translation .
* @ param x
* the offset to translate in x
* @ param y
* the offset to translate in y
* @ param z
* the offset to translate in z
* @ return this */
public Matrix4x3d translation ( double x , double y , double z ) { } }
|
if ( ( properties & PROPERTY_IDENTITY ) == 0 ) this . identity ( ) ; m30 = x ; m31 = y ; m32 = z ; properties = PROPERTY_TRANSLATION | PROPERTY_ORTHONORMAL ; return this ;
|
public class ComplexMath_F64 { /** * Converts a complex number into polar notation .
* @ param input Standard notation
* @ param output Polar notation */
public static void convert ( Complex_F64 input , ComplexPolar_F64 output ) { } }
|
output . r = input . getMagnitude ( ) ; output . theta = Math . atan2 ( input . imaginary , input . real ) ;
|
public class StructrLDAPWrapper { /** * - - - - - private methods - - - - - */
public Rdn getRdn ( final LDAPNode node ) throws FrameworkException , LdapInvalidDnException { } }
|
String name = node . getUserProvidedName ( ) ; if ( name == null ) { name = node . getRdn ( ) ; } return new Rdn ( schemaManager , name ) ;
|
public class FluentMatchingR { /** * Sets a { @ code result } to be returned from { @ link FluentMatchingR # getMatch ( ) } if no match is
* found . */
public FluentMatchingR < T , R > orElse ( R result ) { } }
|
patterns . add ( Pattern . of ( t -> true , t -> result ) ) ; return this ;
|
public class vpnvserver { /** * Use this API to fetch filtered set of vpnvserver resources .
* filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */
public static vpnvserver [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
|
vpnvserver obj = new vpnvserver ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; vpnvserver [ ] response = ( vpnvserver [ ] ) obj . getfiltered ( service , option ) ; return response ;
|
public class BaseOsgiServlet { /** * Normalizes a request path : strips a leading slash and , when no
* baseURL is set and a BASE_PATH property is configured , prefixes the
* path with that base path .
* @ param path the raw path from the request , may be null
* @ return the normalized path , or null when the input was null */
public String fixPathInfo ( String path ) { } }
|
// Strip a single leading "/" -- resources are addressed relative to the root / baseURL.
if ( path == null ) return null ; if ( path . startsWith ( "/" ) ) path = path . substring ( 1 ) ; // Resources already start from root / baseURL
// NOTE(review): triple-nested brace-less ifs -- the BASE_PATH prefix is applied only
// when baseURL is null AND properties is non-null AND BASE_PATH is configured.
// The `baseURL == null` gate looks inverted at first glance; confirm the intent
// (prefix only when no explicit base URL is set) against the class's callers.
if ( baseURL == null ) if ( properties != null ) if ( this . getProperty ( BASE_PATH ) != null ) path = this . getProperty ( BASE_PATH ) + path ; return path ;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.