signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class TempUtils { /** * Creates a temporary directory prefixed with < code > prefix < / code > .
* @ param prefix folder prefix
* @ return temporary folder */
public static File createTempDir ( String prefix ) { } } | File baseDir = new File ( System . getProperty ( "java.io.tmpdir" ) ) ; String baseName = prefix + System . currentTimeMillis ( ) + "-" ; for ( int counter = 0 ; counter < 10000 ; counter ++ ) { File tempDir = new File ( baseDir , baseName + counter ) ; if ( tempDir . mkdir ( ) ) { return tempDir ; } } throw new IllegalStateException ( "Failed to create directory within " + 10000 + " attempts (tried " + baseName + "0 to " + baseName + ( 10000 - 1 ) + ')' ) ; |
public class AbstractMetricGroup { @ Override public MetricGroup addGroup ( int name ) { } } | return addGroup ( String . valueOf ( name ) , ChildType . GENERIC ) ; |
public class CassandraDataHandlerBase { /** * Gets the thrift row .
* @ param id
* the id
* @ param columnFamily
* the column family
* @ param thriftRows
* the thrift rows
* @ return the thrift row */
private ThriftRow getThriftRow ( Object id , String columnFamily , Map < String , ThriftRow > thriftRows ) { } } | ThriftRow tr = thriftRows . get ( columnFamily ) ; if ( tr == null ) { tr = new ThriftRow ( ) ; tr . setColumnFamilyName ( columnFamily ) ; // column - family name
tr . setId ( id ) ; // Id
thriftRows . put ( columnFamily , tr ) ; } return tr ; |
public class ImmutableList { /** * A synonym of { @ link # flatMap } .
* @ param f The function .
* @ param < B > The type of result function .
* @ return The result of bind . */
@ Nonnull public final < B > ImmutableList < B > bind ( @ Nonnull F < A , ImmutableList < B > > f ) { } } | return this . flatMap ( f ) ; |
public class HistoricSHE { /** * Initializes the cipher with key and IV
* @ param IV A 256 bit initialization vector to use for the cipher .
* @ param key A 256 bit key to encrypt with . */
public void init ( byte [ ] IV , byte [ ] key ) { } } | if ( IV . length != blockSize || key . length != blockSize ) throw new RuntimeException ( "key and IV need to be same as block size (" + blockSize + ")." ) ; // TODO subclass exception
this . key = key ; this . IV = IV ; prehash = Misc . cleanXOR ( IV , key ) ; cfg = true ; blockNo = 0 ; buffer = new byte [ blockSize ] ; bufferIndex = 0 ; |
public class GeneticAlgorithm { /** * Sets the number of best chromosomes to copy to new population . Sets the
* value to zero to disable elitism selection . When creating new population
* by crossover and mutation , we have a big chance , that we will loose the
* best chromosome . Elitism first copies the best chromosome ( or a few best
* chromosomes ) to new population . The rest is done in classical way .
* Elitism can very rapidly increase performance of GA , because it prevents
* losing the best found solution . */
public GeneticAlgorithm < T > setElitism ( int elitism ) { } } | if ( elitism < 0 || elitism >= size ) { throw new IllegalArgumentException ( "Invalid elitism: " + elitism ) ; } this . elitism = elitism ; return this ; |
public class ResponseDefinitionBodyMatcherDeserializer { /** * Partially based off https : / / stackoverflow . com / a / 12090818 */
public static long parseFilesize ( String in ) { } } | String cleanedInput = in . trim ( ) . replaceAll ( "," , "." ) ; final Matcher m = Pattern . compile ( "^([\\d.]+)\\s*(\\w)?b?$" , Pattern . CASE_INSENSITIVE ) . matcher ( cleanedInput ) ; if ( ! m . find ( ) ) { throw new IllegalArgumentException ( "Invalid size string: \"" + in + "\"" ) ; } int scale = 1 ; if ( m . group ( 2 ) != null ) { switch ( m . group ( 2 ) . toUpperCase ( ) ) { case "G" : scale *= 1024 ; case "M" : scale *= 1024 ; case "K" : scale *= 1024 ; break ; default : throw new IllegalArgumentException ( "Invalid size unit: " + m . group ( 2 ) ) ; } } return Math . round ( Double . parseDouble ( m . group ( 1 ) ) * scale ) ; |
public class BlackListManager { /** * Test if a phone number is already in the black list .
* @ param phoneNumber
* @ return return true if the phone number is in the bl , false otherwise */
public boolean isPhoneNumberInBlackList ( final String phoneNumber ) { } } | logger . info ( "Receive request for isPhoneNumberInBlackList : " + phoneNumber ) ; boolean retVal = daoService . isPhoneNumberInBlackList ( phoneNumber ) ; logger . info ( "Response for getListPhoneNumbersInBlackList request : " + phoneNumber + " is : " + retVal ) ; return retVal ; |
public class ConfigClassAnalyzer { /** * Strips the first decoration from the decorated object . If the object is not decorated the method
* returns the object itself . */
public static Object stripShallow ( Object decorated ) { } } | try { InjectableProperty delegateProperty = new ConfigClassAnalyzer ( decorated . getClass ( ) ) . getDelegateProperty ( ) ; if ( delegateProperty == null ) { return decorated ; } else { return delegateProperty . getValue ( decorated ) ; } } catch ( Exception e ) { throw new RuntimeException ( e ) ; } |
public class LevelRipConverter { /** * Check the pixel by searching tile on sheet .
* @ param map The destination map reference .
* @ param tileRef The tile sheet .
* @ param progressTileX The progress on horizontal tiles .
* @ param progressTileY The progress on vertical tiles .
* @ return < code > true < / code > if tile found , < code > false < / code > else . */
private static boolean checkPixel ( MapTile map , ImageBuffer tileRef , int progressTileX , int progressTileY ) { } } | final int x = progressTileX * map . getTileWidth ( ) ; final int y = progressTileY * map . getTileHeight ( ) ; final int pixel = tileRef . getRgb ( x , y ) ; // Skip blank tile of image map
if ( TilesExtractor . IGNORED_COLOR_VALUE != pixel ) { // Search if tile is on sheet and get it
final Tile tile = searchForTile ( map , tileRef , progressTileX , progressTileY ) ; if ( tile == null ) { return false ; } map . setTile ( tile ) ; } return true ; |
public class TldLearning { /** * Select positive and negative examples based on the region the user ' s initially selected region . The selected
* region is used as a positive example while all the other regions far away are used as negative examples .
* @ param targetRegion user selected region
* @ param cascadeRegions Set of regions used by the cascade detector */
public void initialLearning ( Rectangle2D_F64 targetRegion , FastQueue < ImageRectangle > cascadeRegions ) { } } | storageMetric . reset ( ) ; fernNegative . clear ( ) ; // learn the initial descriptor
TldHelperFunctions . convertRegion ( targetRegion , targetRegion_I32 ) ; // select the variance the first time using user selected region
variance . selectThreshold ( targetRegion_I32 ) ; // add positive examples
template . addDescriptor ( true , targetRegion_I32 ) ; fern . learnFernNoise ( true , targetRegion_I32 ) ; // Find all the regions which can be used to learn a negative descriptor
for ( int i = 0 ; i < cascadeRegions . size ; i ++ ) { ImageRectangle r = cascadeRegions . get ( i ) ; // see if it passes the variance test
if ( ! variance . checkVariance ( r ) ) continue ; // learn features far away from the target region
double overlap = helper . computeOverlap ( targetRegion_I32 , r ) ; if ( overlap > config . overlapLower ) continue ; fernNegative . add ( r ) ; } // randomize which regions are used
// Collections . shuffle ( fernNegative , rand ) ;
int N = fernNegative . size ( ) ; // Math . min ( config . numNegativeFerns , fernNegative . size ( ) ) ;
for ( int i = 0 ; i < N ; i ++ ) { fern . learnFern ( false , fernNegative . get ( i ) ) ; } // run detection algorithm and if there is an ambiguous solution mark it as not target
detection . detectionCascade ( cascadeRegions ) ; learnAmbiguousNegative ( targetRegion ) ; |
public class ExternalType { /** * Writes the { @ link Integer } to the output .
* This method and its equivalent { @ link # readInteger ( ObjectInput )
* read - variant } store { @ code i } in a more efficient way than serializing
* the { @ link Integer } class .
* @ param out Non - null { @ link ObjectOutput }
* @ param i { @ link Integer } ; may be null
* @ throws IOException Thrown if an I / O error occurred
* @ see # readInteger ( ObjectInput ) */
public static void writeInteger ( final ObjectOutput out , final Integer i ) throws IOException { } } | if ( i == null ) { out . writeByte ( 0 ) ; return ; } out . writeByte ( 1 ) ; out . writeInt ( i ) ; |
public class GZipServletResponseWrapper { /** * Closes the stream .
* @ throws IOException maybe */
public void close ( ) throws IOException { } } | // PrintWriter . close does not throw exceptions . Thus , the call does not need
// be inside a try - catch block .
if ( this . printWriter != null ) { this . printWriter . close ( ) ; } if ( this . gzipOutputStream != null ) { this . gzipOutputStream . close ( ) ; } |
public class TaskGetKey { /** * Top - level non - recursive invoke */
@ Override public void dinvoke ( H2ONode sender ) { } } | _h2o = sender ; Key k = _key ; _key = null ; // Not part of the return result
assert k . home ( ) ; // Gets are always from home ( less we do replication )
// Shipping a result ? Track replicas so we can invalidate . There ' s a
// narrow race on a moving K / V mapping tracking this Value just as it gets
// deleted - in which case , simply retry for another Value .
do _val = H2O . get ( k ) ; // The return result
while ( _val != null && ! _val . setReplica ( sender ) ) ; tryComplete ( ) ; |
public class Cell { /** * Gets the width as a String .
* @ returna value */
public String getWidthAsString ( ) { } } | String w = String . valueOf ( width ) ; if ( w . endsWith ( ".0" ) ) w = w . substring ( 0 , w . length ( ) - 2 ) ; if ( percentage ) w += "%" ; return w ; |
public class FlowContainerDataSource { /** * - - - - - public static methods - - - - - */
public static Iterable < GraphObject > map ( final Iterable < Object > src ) { } } | return Iterables . map ( ( Object t ) -> { if ( t instanceof GraphObject ) { return ( GraphObject ) t ; } else if ( t instanceof Map ) { return Function . toGraphObjectMap ( ( Map ) t ) ; } else { return ( GraphObject ) UiFunction . toGraphObject ( t , 1 ) ; } } , src ) ; |
public class StoreUtils { /** * Returns an empty map with expected size matching the iterable size if
* it ' s of type Collection . Otherwise , an empty map with the default size is
* returned . */
public static < K , V > HashMap < K , V > newEmptyHashMap ( Iterable < ? > iterable ) { } } | if ( iterable instanceof Collection < ? > ) return Maps . newHashMapWithExpectedSize ( ( ( Collection < ? > ) iterable ) . size ( ) ) ; return Maps . newHashMap ( ) ; |
public class HBCIUtils { /** * Wandelt ein gegebenes Datums - Objekt in einen String um , der die Uhrzeit enthält .
* Das Format des erzeugten Strings ist abhängig von der gesetzten
* < em > HBCI4Java < / em > - Locale ( siehe Kernel - Parameter < code > kernel . locale . * < / code > ) .
* @ param date ein Datumsobjekt
* @ return die lokalisierte Darstellung der Uhrzeit als String */
public static String time2StringLocal ( Date date ) { } } | String ret ; try { ret = DateFormat . getTimeInstance ( DateFormat . SHORT , Locale . getDefault ( ) ) . format ( date ) ; } catch ( Exception e ) { throw new InvalidArgumentException ( date . toString ( ) ) ; } return ret ; |
public class MariaDbConnection { /** * Creates a < code > PreparedStatement < / code > object that will generate < code > ResultSet < / code >
* objects with the given type and concurrency . This method is the same as the
* < code > prepareStatement < / code > method above , but it allows the default result set type and
* concurrency to be overridden . The holdability of the created result sets can be determined by
* calling { @ link # getHoldability } .
* @ param sql a < code > String < / code > object that is the SQL statement to be sent
* to the database ; may contain one or more ' ? ' IN parameters
* @ param resultSetType a result set type ; one of < code > ResultSet . TYPE _ FORWARD _ ONLY < / code > ,
* < code > ResultSet . TYPE _ SCROLL _ INSENSITIVE < / code > , or
* < code > ResultSet . TYPE _ SCROLL _ SENSITIVE < / code >
* @ param resultSetConcurrency a concurrency type ; one of < code > ResultSet . CONCUR _ READ _ ONLY < / code >
* or
* < code > ResultSet . CONCUR _ UPDATABLE < / code >
* @ return a new PreparedStatement object containing the pre - compiled SQL statement that will
* produce < code > ResultSet < / code > objects with the given type and concurrency
* @ throws SQLException if a database access error occurs , this method is called on a closed
* connection or the given parameters are not < code > ResultSet < / code > constants
* indicating type and concurrency */
public PreparedStatement prepareStatement ( final String sql , final int resultSetType , final int resultSetConcurrency ) throws SQLException { } } | return internalPrepareStatement ( sql , resultSetType , resultSetConcurrency , Statement . NO_GENERATED_KEYS ) ; |
public class Goal { /** * Gets the goalType value for this Goal .
* @ return goalType * The type of the goal for the { @ code LineItem } . It defines the
* period over which the goal
* for { @ code LineItem } should be reached . */
public com . google . api . ads . admanager . axis . v201808 . GoalType getGoalType ( ) { } } | return goalType ; |
public class BaseExchangeRateProvider { /** * Checks if an { @ link javax . money . convert . ExchangeRate } between two { @ link javax . money . CurrencyUnit } is
* available from this provider . This method should check , if a given rate
* is < i > currently < / i > defined .
* @ param conversionQuery the required { @ link javax . money . convert . ConversionQuery } , not { @ code null }
* @ return { @ code true } , if such an { @ link javax . money . convert . ExchangeRate } is currently
* defined . */
public boolean isAvailable ( ConversionQuery conversionQuery ) { } } | Objects . requireNonNull ( conversionQuery ) ; try { return conversionQuery . getProviderNames ( ) . isEmpty ( ) || conversionQuery . getProviderNames ( ) . contains ( getContext ( ) . getProviderName ( ) ) ; } catch ( Exception e ) { return false ; } |
public class SeaGlassTitlePane { /** * Install the defaults and update the Synth Style . */
private void installDefaults ( ) { } } | // Basic
setFont ( UIManager . getFont ( "InternalFrame.titleFont" ) ) ; closeButtonToolTip = UIManager . getString ( "InternalFrame.closeButtonToolTip" ) ; iconButtonToolTip = UIManager . getString ( "InternalFrame.iconButtonToolTip" ) ; restoreButtonToolTip = UIManager . getString ( "InternalFrame.restoreButtonToolTip" ) ; maxButtonToolTip = UIManager . getString ( "InternalFrame.maxButtonToolTip" ) ; // Synth
updateStyle ( this ) ; |
public class JQMForm { /** * Add the given submit button to the form and automatically have it set
* to submit the form on a click event . */
public void add ( JQMSubmit submit ) { } } | super . add ( submit ) ; submit . addClickHandler ( new ClickHandler ( ) { @ Override public void onClick ( ClickEvent event ) { submit ( ) ; } } ) ; |
public class ContactResourcesImpl { /** * Gets a list of the user ’ s Smartsheet Contacts .
* It mirrors to the following Smartsheet REST API method : GET / contacts
* @ param parameters the pagination parameters
* @ return the contacts as a paged list
* @ throws SmartsheetException */
public PagedResult < Contact > listContacts ( PaginationParameters parameters ) throws SmartsheetException { } } | String path = "contacts" ; if ( parameters != null ) { path += parameters . toQueryString ( ) ; } return this . listResourcesWithWrapper ( path , Contact . class ) ; |
public class MalisisItemBlock { /** * onItemUse needs to be overriden to be able to handle merged blocks and components . */
@ Override public EnumActionResult onItemUse ( EntityPlayer player , World world , BlockPos pos , EnumHand hand , EnumFacing side , float hitX , float hitY , float hitZ ) { } } | ItemStack itemStack = player . getHeldItem ( hand ) ; if ( itemStack . isEmpty ( ) ) return EnumActionResult . FAIL ; if ( ! player . canPlayerEdit ( pos . offset ( side ) , side , itemStack ) ) return EnumActionResult . FAIL ; // first check if the block clicked can be merged with the one in hand
IBlockState placedState = checkMerge ( itemStack , player , world , pos , side , hitX , hitY , hitZ ) ; BlockPos p = pos ; // can ' t merge , offset the placed position to where the block should be placed in the world
if ( placedState == null ) { p = pos . offset ( side ) ; float x = hitX - side . getFrontOffsetX ( ) ; float y = hitY - side . getFrontOffsetY ( ) ; float z = hitZ - side . getFrontOffsetZ ( ) ; // check for merge at the new position too
placedState = checkMerge ( itemStack , player , world , p , side , x , y , z ) ; } if ( placedState == null ) return super . onItemUse ( player , world , pos , hand , side , hitX , hitY , hitZ ) ; // block can be merged
Block block = placedState . getBlock ( ) ; if ( world . checkNoEntityCollision ( placedState . getCollisionBoundingBox ( world , p ) ) && world . setBlockState ( p , placedState , 3 ) ) { SoundType soundType = block . getSoundType ( placedState , world , pos , player ) ; world . playSound ( player , pos , soundType . getPlaceSound ( ) , SoundCategory . BLOCKS , ( soundType . getVolume ( ) + 1.0F ) / 2.0F , soundType . getPitch ( ) * 0.8F ) ; itemStack . shrink ( 1 ) ; } return EnumActionResult . SUCCESS ; |
public class Pointer { /** * < b > NOTE : < / b > This method does not take into account the position
* and array offset of the given buffer . In order to create a
* pointer that takes the position and array offset into account ,
* use the { @ link # toBuffer ( Buffer ) } method . < br / >
* < br / >
* If the given buffer has a backing array , then the returned
* pointer will in any case point to the start of the array ,
* even if the buffer has been created using the < code > slice < / code >
* method ( like { @ link ByteBuffer # slice ( ) } ) . If the buffer is
* direct , then this method will return a Pointer to the address
* of the direct buffer . If the buffer has been created using the
* < code > slice < / code > method , then this will be the actual start
* of the slice . Although this implies a different treatment of
* direct - and non direct buffers , the method is kept for
* backward compatibility . < br / >
* < br / >
* In both cases , for direct and array - based buffers , this method
* does not take into account the position of the given buffer . < br / >
* < br / >
* The buffer must not be null , and either be a direct buffer , or
* have a backing array
* @ param buffer The buffer the pointer should point to
* @ return The pointer
* @ throws IllegalArgumentException If the given buffer
* is null or is neither direct nor has a backing array */
public static Pointer to ( Buffer buffer ) { } } | if ( buffer == null || ( ! buffer . isDirect ( ) && ! buffer . hasArray ( ) ) ) { throw new IllegalArgumentException ( "Buffer may not be null and must have an array or be direct" ) ; } return new Pointer ( buffer ) ; |
public class hqlLexer { /** * $ ANTLR start " BAND " */
public final void mBAND ( ) throws RecognitionException { } } | try { int _type = BAND ; int _channel = DEFAULT_TOKEN_CHANNEL ; // hql . g : 744:6 : ( ' & ' )
// hql . g : 744:8 : ' & '
{ match ( '&' ) ; if ( state . failed ) return ; } state . type = _type ; state . channel = _channel ; } finally { // do for sure before leaving
} |
public class MutableHttpServletRequest { /** * Set the cachedInputStream as a copy of UTF - 8 encoded { @ link ByteArrayInputStream }
* @ param body { @ link String } body to create the { @ link ByteArrayInputStream } from */
public void setInputStream ( String body ) { } } | try ( ByteArrayInputStream stream = new ByteArrayInputStream ( body . getBytes ( DEFAULT_CHARSET ) ) ) { cachedInputStream = new ByteArrayOutputStream ( ) ; IOUtils . copy ( stream , cachedInputStream ) ; } catch ( IOException e ) { LOG . error ( "Exception on writing InputStream." , e ) ; } |
public class StaxServerConfiguration { /** * Bootstrap mechanism that loads the configuration for the server object based
* on the specified configuration reading mechanism .
* The reference implementation of the configuration is XML - based , but this interface
* allows for whatever mechanism is desired
* @ param configurationReader desired configuration reader */
private ServerConfiguration loadConfiguration ( ServerConfigurationReader configurationReader ) throws ConfigurationException { } } | ServerConfiguration configuration = configurationReader . read ( ) ; return configuration ; |
public class JITUtils { /** * Add synthetic fields required by { @ link # loadClassConstant } .
* @ param cv The current class visitor .
* @ param classConstantFieldNames The set of class constant field names . */
public static void addClassConstantMembers ( ClassVisitor cv , Set < String > classConstantFieldNames ) { } } | if ( ! classConstantFieldNames . isEmpty ( ) ) { addClassConstantMethod ( cv ) ; // Generate synthetic fields for each of class constants . d676434
for ( String fieldName : classConstantFieldNames ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , INDENT + "adding class constant field : " + fieldName ) ; FieldVisitor fv = cv . visitField ( ACC_STATIC + ACC_SYNTHETIC , fieldName , "Ljava/lang/Class;" , null , null ) ; fv . visitEnd ( ) ; } } |
public class CallOptions { /** * Returns a new { @ code CallOptions } with { @ code executor } to be used instead of the default
* executor specified with { @ link ManagedChannelBuilder # executor } . */
public CallOptions withExecutor ( @ Nullable Executor executor ) { } } | CallOptions newOptions = new CallOptions ( this ) ; newOptions . executor = executor ; return newOptions ; |
public class BooleanConstantField { /** * Loads this constant ' s settings from the Properties . If they don ' t exist this will do nothing . */
@ Override public void loadFromProperties ( Properties props ) { } } | String strValue = props . getProperty ( container . getSimpleName ( ) + "." + name ) ; if ( strValue == null ) return ; boolean intValue = Boolean . parseBoolean ( strValue ) ; set ( intValue ) ; |
public class CircuitBreakerStateImpl { /** * Implements the logic for recordResult for the cases where synchronization is required */
private void synchronizedRecordResult ( CircuitBreakerResult result ) { } } | synchronized ( this ) { switch ( state . get ( ) ) { case CLOSED : rollingWindow . record ( result ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Recording result {0} in closed state: {1}" , result , rollingWindow ) ; } if ( rollingWindow . isOverThreshold ( ) ) { stateOpen ( ) ; } break ; case HALF_OPEN : if ( result == CircuitBreakerResult . FAILURE ) { stateOpen ( ) ; } else { halfOpenRunningExecutions -- ; halfOpenSuccessfulExecutions ++ ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Recording result {0} in half-open state. Running executions: {1}, Current results: ({2}/{3})" , result , halfOpenRunningExecutions , halfOpenSuccessfulExecutions , policy . getSuccessThreshold ( ) ) ; } if ( halfOpenSuccessfulExecutions >= policy . getSuccessThreshold ( ) ) { stateClosed ( ) ; } } break ; case OPEN : if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Recording result {0} in open state" , result ) ; } // Nothing else to do
break ; } } |
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public ExternalAlgorithmRGPADBDRY createExternalAlgorithmRGPADBDRYFromString ( EDataType eDataType , String initialValue ) { } } | ExternalAlgorithmRGPADBDRY result = ExternalAlgorithmRGPADBDRY . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ; |
public class DefaultStorageService { /** * users whichare seen only by administrators ) */
@ Transactional ( readOnly = true ) @ Profile public int countEntityChildrenById ( String id ) throws NotFoundException { } } | return storageDao . countEntityChildrenById ( id ) ; |
public class HexDependencyResolver { /** * this method is used when there ' s a known version */
private String getSha1 ( String name , String version ) { } } | if ( dotHexCachePath == null || name == null || version == null ) { logger . warn ( "Can't calculate SHA1, missing information: .hex-cache = {}, name = {}, version = {}" , dotHexCachePath , name , version ) ; return null ; } File tarFile = new File ( dotHexCachePath + fileSeparator + name + Constants . DASH + version + TAR_EXTENSION ) ; try { return ChecksumUtils . calculateSHA1 ( tarFile ) ; } catch ( IOException e ) { logger . warn ( "Failed calculating SHA1 of {}. Make sure HEX is installed" , tarFile . getPath ( ) ) ; logger . debug ( "Error: {}" , e . getStackTrace ( ) ) ; return null ; } |
public class DefaultOsgiUtilsService { /** * tries to retrieve the service from the given service - tracker for the amount of milliseconds provided by the given
* timeout .
* @ throws OsgiServiceNotAvailableException if the service could not be found within the given timeout */
private static Object waitForServiceFromTracker ( ServiceTracker tracker , long timeout ) throws OsgiServiceNotAvailableException { } } | synchronized ( tracker ) { tracker . open ( ) ; try { return tracker . waitForService ( timeout ) ; } catch ( InterruptedException e ) { throw new OsgiServiceNotAvailableException ( e ) ; } finally { tracker . close ( ) ; } } |
public class ExpandedNameTable { /** * Given an expanded name represented by namespace , local name and node type ,
* return an ID . If the expanded - name does not exist in the internal tables ,
* the entry will be created , and the ID will be returned . Any additional
* nodes that are created that have this expanded name will use this ID .
* @ param namespace The namespace
* @ param localName The local name
* @ param type The node type
* @ return the expanded - name id of the node . */
public int getExpandedTypeID ( String namespace , String localName , int type ) { } } | return getExpandedTypeID ( namespace , localName , type , false ) ; |
public class CRFCliqueTree { /** * Computes the unnormalized log conditional distribution over values of the
* element at position pos in the sequence , conditioned on the values of the
* elements in all other positions of the provided sequence .
* @ param sequence
* the sequence containing the rest of the values to condition on
* @ param position
* the position of the element to give a distribution for
* @ return an array of type double , representing a probability distribution ;
* sums to 1.0 */
public double [ ] scoresOf ( int [ ] sequence , int position ) { } } | if ( position >= factorTables . length ) throw new RuntimeException ( "Index out of bounds: " + position ) ; // DecimalFormat nf = new DecimalFormat ( " # 0.000 " ) ;
// if ( position > 0 & & position < sequence . length - 1 ) System . out . println ( position
// + " : asking about " + sequence [ position - 1 ] + " ( " + sequence [ position ] +
// " ) " + sequence [ position + 1 ] ) ;
double [ ] probThisGivenPrev = new double [ numClasses ] ; double [ ] probNextGivenThis = new double [ numClasses ] ; // double [ ] marginal = new double [ numClasses ] ; / / for debugging only
// compute prob of this tag given the window - 1 previous tags , normalized
// extract the window - 1 previous tags , pad left with background if necessary
int prevLength = windowSize - 1 ; int [ ] prev = new int [ prevLength + 1 ] ; // leave an extra element for the
// label at this position
int i = 0 ; for ( ; i < prevLength - position ; i ++ ) { // will only happen if
// position - prevLength < 0
prev [ i ] = classIndex . indexOf ( backgroundSymbol ) ; } for ( ; i < prevLength ; i ++ ) { prev [ i ] = sequence [ position - prevLength + i ] ; } for ( int label = 0 ; label < numClasses ; label ++ ) { prev [ prev . length - 1 ] = label ; probThisGivenPrev [ label ] = factorTables [ position ] . unnormalizedLogProb ( prev ) ; // marginal [ label ] = factorTables [ position ] . logProbEnd ( label ) ; / / remove :
// for debugging only
} // ArrayMath . logNormalize ( probThisGivenPrev ) ;
// compute the prob of the window - 1 next tags given this tag
// extract the window - 1 next tags
int nextLength = windowSize - 1 ; if ( position + nextLength >= length ( ) ) { nextLength = length ( ) - position - 1 ; } FactorTable nextFactorTable = factorTables [ position + nextLength ] ; if ( nextLength != windowSize - 1 ) { for ( int j = 0 ; j < windowSize - 1 - nextLength ; j ++ ) { nextFactorTable = nextFactorTable . sumOutFront ( ) ; } } if ( nextLength == 0 ) { // we are asking about the prob of no sequence
Arrays . fill ( probNextGivenThis , 1.0 ) ; } else { int [ ] next = new int [ nextLength ] ; System . arraycopy ( sequence , position + 1 , next , 0 , nextLength ) ; for ( int label = 0 ; label < numClasses ; label ++ ) { // ask the factor table such that pos is the first position in the
// window
// probNextGivenThis [ label ] =
// factorTables [ position + nextLength ] . conditionalLogProbGivenFirst ( label ,
// next ) ;
// probNextGivenThis [ label ] =
// nextFactorTable . conditionalLogProbGivenFirst ( label , next ) ;
probNextGivenThis [ label ] = nextFactorTable . unnormalizedConditionalLogProbGivenFirst ( label , next ) ; } } // pointwise multiply
return ArrayMath . pairwiseAdd ( probThisGivenPrev , probNextGivenThis ) ; |
public class Email { /** * Add a recipient of the mail .
* @ param recipients
* one or several recipient to add
* @ return this instance for fluent chaining */
@ Override public Email recipient ( Recipient ... recipients ) { } } | this . recipients . addAll ( Arrays . asList ( recipients ) ) ; return this ; |
public class NormalizerStandardize { /** * Load the means and standard deviations from the file system
* @ param files the files to load from . Needs 4 files if normalizing labels , otherwise 2. */
public void load ( File ... files ) throws IOException { } } | setFeatureStats ( DistributionStats . load ( files [ 0 ] , files [ 1 ] ) ) ; if ( isFitLabel ( ) ) { setLabelStats ( DistributionStats . load ( files [ 2 ] , files [ 3 ] ) ) ; } |
public class Types { /** * Test if object instance is not null and extends or implements expected type . This predicate consider primitive and
* related boxing types as equivalent , e . g . < code > 1.23 < / code > is instance of { @ link Double } .
* @ param o object instance to test , possible null ,
* @ param t expected type .
* @ return true if instance is not null and extends or implements requested type . */
public static boolean isInstanceOf ( Object o , Type t ) { } } | if ( o == null ) { return false ; } if ( t instanceof Class ) { Class < ? > clazz = ( Class < ? > ) t ; if ( clazz . isPrimitive ( ) ) { return BOXING_MAP . get ( clazz ) == o . getClass ( ) ; } return clazz . isInstance ( o ) ; } return false ; |
public class AspectranDtdResolver { /** * Converts a public DTD into a local one .
* @ param publicId unused but required by EntityResolver interface
* @ param systemId the DTD that is being requested
* @ return the InputSource for the DTD
* @ throws SAXException if anything goes wrong */
@ Override public InputSource resolveEntity ( String publicId , String systemId ) throws SAXException { } } | if ( validating ) { try { InputSource source = null ; if ( publicId != null ) { String path = doctypeMap . get ( publicId . toUpperCase ( ) ) ; source = getInputSource ( path ) ; } if ( source == null && systemId != null ) { String path = doctypeMap . get ( systemId . toUpperCase ( ) ) ; source = getInputSource ( path ) ; } return source ; } catch ( Exception e ) { throw new SAXException ( e . toString ( ) ) ; } } else { return new InputSource ( new StringReader ( "" ) ) ; } |
public class BasicRegistry { /** * TODO this doesn ' t work for case insensitive */
@ Override public List < String > getGroupsForUser ( String userSecurityName ) throws EntryNotFoundException , RegistryException { } } | if ( userSecurityName == null ) { throw new IllegalArgumentException ( "userSecurityName is null" ) ; } if ( userSecurityName . isEmpty ( ) ) { throw new IllegalArgumentException ( "userSecurityName is an empty String" ) ; } if ( ! isValidUser ( userSecurityName ) ) { throw new EntryNotFoundException ( userSecurityName + " does not exist" ) ; } List < String > matched = new ArrayList < String > ( ) ; Set < String > groupNames = state . groups . keySet ( ) ; Iterator < String > itr = groupNames . iterator ( ) ; while ( itr . hasNext ( ) ) { String groupName = itr . next ( ) ; if ( state . groups . get ( groupName ) . contains ( userSecurityName ) ) { matched . add ( groupName ) ; } } return matched ; |
public class DoubleIntIndex { /** * Check if row indexed i is less than row indexed j
* @ param i the first index
* @ param j the second index
* @ return true or false */
private boolean lessThan ( int i , int j ) { } } | if ( sortOnValues ) { if ( values [ i ] < values [ j ] ) { return true ; } } else { if ( keys [ i ] < keys [ j ] ) { return true ; } } return false ; |
public class Mutation { /** * Returns a mutation that will delete the row with primary key { @ code key } . Exactly equivalent to
* { @ code delete ( table , KeySet . singleKey ( key ) ) } . */
public static Mutation delete ( String table , Key key ) { } } | return delete ( table , KeySet . singleKey ( key ) ) ; |
public class HeaderHandler { /** * Clear everything out of storage for this handler . */
public void clear ( ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Clearing this header handler: " + this ) ; } this . num_items = 0 ; this . values . clear ( ) ; this . genericValues . clear ( ) ; |
public class AmazonSimpleWorkflowClient { /** * Returns the list of domains registered in the account . The results may be split into multiple pages . To retrieve
* subsequent pages , make the call again using the nextPageToken returned by the initial call .
* < note >
* This operation is eventually consistent . The results are best effort and may not exactly reflect recent updates
* and changes .
* < / note >
* < b > Access Control < / b >
* You can use IAM policies to control this action ' s access to Amazon SWF resources as follows :
* < ul >
* < li >
* Use a < code > Resource < / code > element with the domain name to limit the action to only specified domains . The
* element must be set to < code > arn : aws : swf : : AccountID : domain / * < / code > , where < i > AccountID < / i > is the account ID ,
* with no dashes .
* < / li >
* < li >
* Use an < code > Action < / code > element to allow or deny permission to call this action .
* < / li >
* < li >
* You cannot use an IAM policy to constrain this action ' s parameters .
* < / li >
* < / ul >
* If the caller doesn ' t have sufficient permissions to invoke the action , or the parameter values fall outside the
* specified constraints , the action fails . The associated event attribute ' s < code > cause < / code > parameter is set to
* < code > OPERATION _ NOT _ PERMITTED < / code > . For details and example IAM policies , see < a
* href = " http : / / docs . aws . amazon . com / amazonswf / latest / developerguide / swf - dev - iam . html " > Using IAM to Manage Access to
* Amazon SWF Workflows < / a > in the < i > Amazon SWF Developer Guide < / i > .
* @ param listDomainsRequest
* @ return Result of the ListDomains operation returned by the service .
* @ throws OperationNotPermittedException
* Returned when the caller doesn ' t have sufficient permissions to invoke the action .
* @ sample AmazonSimpleWorkflow . ListDomains
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / swf - 2012-01-25 / ListDomains " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public DomainInfos listDomains ( ListDomainsRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeListDomains ( request ) ; |
public class WalkingIterator {
    /**
     * Returns the next node in the set and advances the position of the iterator in the
     * set. After a NodeIterator is created, the first call to nextNode() returns the
     * first node in the set.
     *
     * @return The next <code>Node</code> in the set being iterated over, or
     *         <code>DTM.NULL</code> if there are no more members in that set.
     */
    public int nextNode() {
        // Once the last node has been delivered, always report exhaustion.
        if (m_foundLast)
            return DTM.NULL;
        // If the variable stack position is not -1, we'll have to set our position in the
        // variable stack, so our variable access will be correct. Iterators at the top
        // level of the expression need to reset the variable stack, while iterators in
        // predicates do not need to, and should not, since their execution may be much
        // later than top-level iterators. m_stackFrame is set in setRoot, which is
        // called from the execute method.
        if (-1 == m_stackFrame) {
            // Top-level iterator: no frame juggling required.
            return returnNextNode(m_firstWalker.nextNode());
        } else {
            VariableStack vars = m_execContext.getVarStack();
            // Save the caller's frame, switch to ours, walk, then restore.
            // NOTE: these statements are order-critical; ideally save+set would be one
            // atomic operation, as would the restore.
            int savedStart = vars.getStackFrame();
            vars.setStackFrame(m_stackFrame);
            int n = returnNextNode(m_firstWalker.nextNode());
            vars.setStackFrame(savedStart);
            return n;
        }
    }
}
public class RebuildWorkspacesRequest { /** * The WorkSpace to rebuild . You can specify a single WorkSpace .
* @ param rebuildWorkspaceRequests
* The WorkSpace to rebuild . You can specify a single WorkSpace . */
public void setRebuildWorkspaceRequests ( java . util . Collection < RebuildRequest > rebuildWorkspaceRequests ) { } } | if ( rebuildWorkspaceRequests == null ) { this . rebuildWorkspaceRequests = null ; return ; } this . rebuildWorkspaceRequests = new com . amazonaws . internal . SdkInternalList < RebuildRequest > ( rebuildWorkspaceRequests ) ; |
public class TypedMap { /** * Associates the specified value with the specified key in this map .
* If the map previously contained a mapping for
* the key , the old value is replaced .
* @ param pKey key with which the specified value is to be associated .
* @ param pValue value to be associated with the specified key .
* @ return previous value associated with specified key , or { @ code null }
* if there was no mapping for key . A { @ code null } return can
* also indicate that the map previously associated { @ code null }
* with the specified pKey , if the implementation supports
* { @ code null } values .
* @ throws IllegalArgumentException if the value is not compatible with the
* key .
* @ see TypedMap . Key */
public V put ( K pKey , V pValue ) { } } | if ( ! pKey . isCompatibleValue ( pValue ) ) { throw new IllegalArgumentException ( "incompatible value for key" ) ; } return entries . put ( pKey , pValue ) ; |
public class DServer { public synchronized void rem_obj_polling ( final String [ ] argin , final boolean with_db_upd ) throws DevFailed { } } | Util . out4 . println ( "In rem_obj_polling command" ) ; for ( final String arg : argin ) { Util . out4 . println ( "Input string = " + arg ) ; } // Check that parameters number is correct
if ( argin . length != 3 ) { Except . throw_exception ( "API_WrongNumberOfArgs" , "Incorrect number of inout arguments" , "DServer.rem_obj_polling" ) ; } // Find the device
final Util tg = Util . instance ( ) ; DeviceImpl dev = null ; try { dev = tg . get_device_by_name ( argin [ 0 ] ) ; } catch ( final DevFailed e ) { Except . re_throw_exception ( e , "API_DeviceNotFound" , "Device " + argin + " not found" , "DServer.rem_obj_polling" ) ; } // Check that the device is polled
assert dev != null ; if ( dev . is_polled ( ) == false ) { Except . throw_exception ( "API_DeviceNotPolled" , "Device " + argin [ 0 ] + " is not polled" , "DServer.rem_obj_polling_period" ) ; } // Find the wanted object in the list of device polled object
final String obj_type = argin [ 1 ] . toLowerCase ( ) ; final String obj_name = argin [ 2 ] . toLowerCase ( ) ; int type = Tango_POLL_CMD ; if ( obj_type . equals ( Tango_PollCommand ) ) { type = Tango_POLL_CMD ; } else if ( obj_type . equals ( Tango_PollAttribute ) ) { type = Tango_POLL_ATTR ; } else { Except . throw_exception ( "API_NotSupported" , "Object type " + obj_type + " not supported" , "DServer.rem_obj_polling_period" ) ; } final Vector poll_list = dev . get_poll_obj_list ( ) ; for ( int i = 0 ; i < poll_list . size ( ) ; i ++ ) { final PollObj poll_obj = ( PollObj ) poll_list . elementAt ( i ) ; if ( poll_obj . get_type ( ) == type ) { if ( poll_obj . get_name ( ) . equals ( obj_name ) ) { poll_list . remove ( i ) ; } } } Util . out4 . println ( "Sending cmd to polling thread" ) ; final TangoMonitor mon = tg . get_poll_monitor ( ) ; final PollThCmd shared_cmd = tg . get_poll_shared_cmd ( ) ; if ( shared_cmd . cmd_pending == true ) { mon . signal ( ) ; } shared_cmd . cmd_pending = true ; shared_cmd . cmd_code = Tango_POLL_REM_OBJ ; shared_cmd . dev = dev ; shared_cmd . name = obj_name ; shared_cmd . type = type ; mon . signal ( ) ; Util . out4 . println ( "Cmd sent to polling thread" ) ; // Wait for thread to execute command
boolean interrupted ; while ( shared_cmd . cmd_pending == true ) { interrupted = mon . wait_it ( Tango_DEFAULT_TIMEOUT ) ; if ( shared_cmd . cmd_pending == true && interrupted == false ) { // Util . out4
System . out . println ( "TIME OUT" ) ; Except . throw_exception ( "API_CommandTimedOut" , "Polling thread blocked !!!" , "DServer.rem_obj_polling" ) ; } } Util . out4 . println ( "Thread cmd normally executed" ) ; // Mark the device as non polled if this was the last polled object
if ( poll_list . size ( ) == 0 ) { dev . is_polled ( false ) ; } // Update database property . This means remove object entry in the
// polling
// properties if they exist or add it to the list of device not polled
// for automatic polling defined at command / attribute level .
// Do this if possible and wanted .
if ( with_db_upd && Util . _UseDb ) { final DbDatum db_info = new DbDatum ( "polled_attr" ) ; boolean update_needed = false ; if ( type == Tango_POLL_CMD ) { db_info . name = "polled_cmd" ; final Vector cmd_list = dev . get_polled_cmd ( ) ; int i ; for ( i = 0 ; i < cmd_list . size ( ) ; i ++ ) { final String s = ( String ) cmd_list . elementAt ( i ) ; if ( s . equals ( obj_name ) ) { cmd_list . remove ( i ) ; cmd_list . remove ( i ) ; db_info . insert ( stringVect2StringArray ( cmd_list ) ) ; update_needed = true ; break ; } i ++ ; } if ( update_needed == false ) { final Vector non_auto_cmd = dev . get_non_auto_polled_cmd ( ) ; for ( i = 0 ; i < non_auto_cmd . size ( ) ; i ++ ) { final String s = ( String ) non_auto_cmd . elementAt ( i ) ; if ( s . equals ( obj_name ) ) { break ; } } if ( i == cmd_list . size ( ) ) { non_auto_cmd . add ( obj_name ) ; db_info . name = "non_auto_polled_cmd" ; db_info . insert ( stringVect2StringArray ( non_auto_cmd ) ) ; update_needed = true ; } } } else { final Vector attr_list = dev . get_polled_attr ( ) ; int i ; for ( i = 0 ; i < attr_list . size ( ) ; i ++ ) { final String s = ( String ) attr_list . elementAt ( i ) ; if ( s . equals ( obj_name ) ) { attr_list . remove ( i ) ; attr_list . remove ( i ) ; db_info . insert ( stringVect2StringArray ( attr_list ) ) ; update_needed = true ; break ; } i ++ ; } if ( update_needed == false ) { final Vector non_auto_attr = dev . get_non_auto_polled_attr ( ) ; for ( i = 0 ; i < non_auto_attr . size ( ) ; i ++ ) { final String s = ( String ) non_auto_attr . elementAt ( i ) ; if ( s . equals ( obj_name ) ) { break ; } } if ( i == attr_list . size ( ) ) { non_auto_attr . add ( obj_name ) ; db_info . name = "non_auto_polled_cmd" ; db_info . insert ( stringVect2StringArray ( non_auto_attr ) ) ; update_needed = true ; } } } if ( update_needed == true ) { final DbDatum [ ] send_data = new DbDatum [ 1 ] ; send_data [ 0 ] = db_info ; dev . get_db_device ( ) . put_property ( send_data ) ; Util . 
out4 . println ( "Polling properties updated" ) ; } } |
public class ForceOrdering { /** * Executes the main FORCE algorithm .
* @ param formula the CNF formula for the ordering
* @ param hypergraph the hypergraph for this formula
* @ param nodes the variable to hypergraph node mapping
* @ return the variable ordering according to the FORCE algorithm */
private List < Variable > force ( final Formula formula , final Hypergraph < Variable > hypergraph , final Map < Variable , HypergraphNode < Variable > > nodes ) { } } | final LinkedHashMap < HypergraphNode < Variable > , Integer > initialOrdering = createInitialOrdering ( formula , nodes ) ; LinkedHashMap < HypergraphNode < Variable > , Integer > lastOrdering ; LinkedHashMap < HypergraphNode < Variable > , Integer > currentOrdering = initialOrdering ; do { lastOrdering = currentOrdering ; final LinkedHashMap < HypergraphNode < Variable > , Double > newLocations = new LinkedHashMap < > ( ) ; for ( final HypergraphNode < Variable > node : hypergraph . nodes ( ) ) newLocations . put ( node , node . computeTentativeNewLocation ( lastOrdering ) ) ; currentOrdering = orderingFromTentativeNewLocations ( newLocations ) ; } while ( shouldProceed ( lastOrdering , currentOrdering ) ) ; final Variable [ ] ordering = new Variable [ currentOrdering . size ( ) ] ; for ( final Map . Entry < HypergraphNode < Variable > , Integer > entry : currentOrdering . entrySet ( ) ) ordering [ entry . getValue ( ) ] = entry . getKey ( ) . content ( ) ; return Arrays . asList ( ordering ) ; |
public class DoubleStream { /** * Returns an { @ code DoubleStream } consisting of the results of applying the given
* function to the elements of this stream .
* < p > This is an intermediate operation .
* < p > Example :
* < pre >
* mapper : ( a ) - & gt ; a + 5
* stream : [ 1 , 2 , 3 , 4]
* result : [ 6 , 7 , 8 , 9]
* < / pre >
* @ param mapper the mapper function used to apply to each element
* @ return the new stream
* @ see Stream # map ( com . annimon . stream . function . Function ) */
@ NotNull public DoubleStream map ( @ NotNull final DoubleUnaryOperator mapper ) { } } | return new DoubleStream ( params , new DoubleMap ( iterator , mapper ) ) ; |
public class CalendarPanel { /** * setSizeOfMonthYearPanel , This sets the size of the panel at the top of the calendar that
* holds the month and the year label . The size is calculated from the largest month name ( in
* pixels ) , that exists in locale and language that is being used by the date picker . */
private void setSizeOfMonthYearPanel ( ) { } } | // Skip this function if the settings have not been applied .
if ( settings == null ) { return ; } // Get the font metrics object .
Font font = labelMonth . getFont ( ) ; Canvas canvas = new Canvas ( ) ; FontMetrics metrics = canvas . getFontMetrics ( font ) ; // Calculate the preferred height for the month and year panel .
int heightNavigationButtons = buttonPreviousYear . getPreferredSize ( ) . height ; int preferredHeightMonthLabel = labelMonth . getPreferredSize ( ) . height ; int preferredHeightYearLabel = labelYear . getPreferredSize ( ) . height ; int monthFontHeight = metrics . getHeight ( ) ; int monthFontHeightWithPadding = monthFontHeight + 2 ; int panelHeight = Math . max ( monthFontHeightWithPadding , Math . max ( preferredHeightMonthLabel , Math . max ( preferredHeightYearLabel , heightNavigationButtons ) ) ) ; // Get the length of the longest translated month string ( in pixels ) .
DateFormatSymbols symbols = DateFormatSymbols . getInstance ( settings . getLocale ( ) ) ; String [ ] allLocalMonths = symbols . getMonths ( ) ; int longestMonthPixels = 0 ; for ( String month : allLocalMonths ) { int monthPixels = metrics . stringWidth ( month ) ; longestMonthPixels = ( monthPixels > longestMonthPixels ) ? monthPixels : longestMonthPixels ; } int yearPixels = metrics . stringWidth ( "_2000" ) ; // Calculate the size of a box to hold the text with some padding .
Dimension size = new Dimension ( longestMonthPixels + yearPixels + 12 , panelHeight ) ; // Set the monthAndYearPanel to the appropriate constant size .
monthAndYearOuterPanel . setMinimumSize ( size ) ; monthAndYearOuterPanel . setPreferredSize ( size ) ; // monthAndYearOuterPanel . setMaximumSize ( size ) ;
// Redraw the panel .
this . doLayout ( ) ; this . validate ( ) ; |
public class DCSplashPanel { /** * Wraps a content panel in a scroll pane and applies a maximum width to the
* content to keep it nicely in place on the screen .
* @ param panel
* @ return */
protected JScrollPane wrapContent ( final JComponent panel ) { } } | panel . setMaximumSize ( new Dimension ( WIDTH_CONTENT , Integer . MAX_VALUE ) ) ; panel . setAlignmentX ( Component . LEFT_ALIGNMENT ) ; final DCPanel wrappingPanel = new DCPanel ( ) ; final BoxLayout layout = new BoxLayout ( wrappingPanel , BoxLayout . PAGE_AXIS ) ; wrappingPanel . setLayout ( layout ) ; wrappingPanel . add ( panel ) ; wrappingPanel . setBorder ( new EmptyBorder ( 0 , MARGIN_LEFT , 0 , 0 ) ) ; return WidgetUtils . scrolleable ( wrappingPanel ) ; |
public class ProviderOperationsMetadatasInner { /** * Gets provider operations metadata for all resource providers .
* @ param nextPageLink The NextLink from the previous successful call to List operation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; ProviderOperationsMetadataInner & gt ; object */
public Observable < Page < ProviderOperationsMetadataInner > > listNextAsync ( final String nextPageLink ) { } } | return listNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < ProviderOperationsMetadataInner > > , Page < ProviderOperationsMetadataInner > > ( ) { @ Override public Page < ProviderOperationsMetadataInner > call ( ServiceResponse < Page < ProviderOperationsMetadataInner > > response ) { return response . body ( ) ; } } ) ; |
public class Line { /** * Counts the amount of ' ch ' in this line .
* @ param ch
* The char to count .
* @ return A value > 0 if this line only consists of ' ch ' end spaces . */
private int countChars ( final char ch ) { } } | int count = 0 ; for ( int i = 0 ; i < this . value . length ( ) ; i ++ ) { final char c = this . value . charAt ( i ) ; if ( c == ' ' ) { continue ; } if ( c == ch ) { count ++ ; continue ; } count = 0 ; break ; } return count ; |
public class LocaleParameterConverter { /** * Converts the given String to a { @ link Locale } object using the { @ link Locale # forLanguageTag ( String ) } method .
* @ param input the input , can be { @ literal null }
* @ return the created locale , empty locale ( { @ code " " " " " " } ) if the given input is { @ code null } or empty .
* @ throws IllegalArgumentException if the instance of T cannot be created from the input . */
@ Override public Locale fromString ( String input ) throws IllegalArgumentException { } } | if ( ! Strings . isNullOrEmpty ( input ) ) { return Locale . forLanguageTag ( input . replace ( "_" , "-" ) ) ; } return InternationalizationService . DEFAULT_LOCALE ; |
public class ScriptTagFilter {
    /**
     * Processes the directives contained in a script element: parts with a location are
     * applied to the document (as attributes or inserted nodes), the location-less part
     * (if any) becomes the replacement text for the input field.
     *
     * @param scriptElement the script element to process
     * @return the text that should replace the input field
     * @throws IOException               on read failure while parsing script parts
     * @throws DocumentTemplateException if a directive is malformed or uses an
     *                                   unsupported namespace prefix
     */
    private static String addScriptDirectives(Element scriptElement) throws IOException, DocumentTemplateException {
        String scriptReplacement = "";
        List scriptParts = parseScriptParts(scriptElement.getValue());
        for (int index = 0; index < scriptParts.size(); index++) {
            ScriptPart scriptPart = (ScriptPart) scriptParts.get(index);
            if (scriptPart.getLocation() == null) {
                // No target location: this text replaces the input field itself.
                scriptReplacement = scriptPart.getText();
            } else {
                Element enclosingElement = findEnclosingElement(scriptElement, scriptPart.getLocation());
                if (scriptPart.isTagAttribute()) {
                    // Attribute directive: text has the form "name=value".
                    String[] nameValue = scriptPart.getText().split("=", 2);
                    if (nameValue.length != 2) {
                        throw new DocumentTemplateException("script error: # attribute name=value not found");
                    }
                    // Default to the enclosing element's namespace; a prefixed attribute
                    // name that differs from the element's prefix is resolved via the
                    // XPath context.
                    String attributeNamespace = enclosingElement.getNamespaceURI();
                    if (nameValue[0].contains(":")) {
                        String prefix = nameValue[0].split(":")[0];
                        if (!prefix.equals(enclosingElement.getNamespacePrefix())) {
                            attributeNamespace = XPATH_CONTEXT.lookup(prefix);
                            if (attributeNamespace == null) {
                                throw new DocumentTemplateException("unsupported attribute namespace: " + prefix);
                            }
                        }
                    }
                    enclosingElement.addAttribute(new Attribute(nameValue[0], attributeNamespace, nameValue[1]));
                } else {
                    // Node directive: insert a new node before the element, or after it
                    // when the part is flagged to go after the end tag.
                    ParentNode parent = enclosingElement.getParent();
                    int parentIndex = parent.indexOf(enclosingElement);
                    if (scriptPart.afterEndTag()) {
                        parentIndex++;
                    }
                    parent.insertChild(newNode(scriptPart.getText()), parentIndex);
                }
            }
        }
        return scriptReplacement;
    }
}
public class CmsChoiceSubmenu {
    /**
     * Helper method to position a submenu on the left side of a menu entry.<p>
     *
     * The submenu is first placed far offscreen (-2000, -2000) so its real rendered
     * size and absolute coordinates can be measured, then the final left/top values are
     * computed relative to the reference entry. The measurement and the final placement
     * are order-critical.
     *
     * @param widgetEntry the widget entry relative to which the submenu should be positioned
     */
    protected void positionNextToMenuEntry(final CmsChoiceMenuEntryWidget widgetEntry) {
        Element elem = getElement();
        elem.getStyle().setPosition(Style.Position.ABSOLUTE);
        Element referenceElement = null;
        // Offscreen staging position used to measure the submenu's rendered geometry.
        int startX = -2000;
        int startY = -2000;
        int deltaX = 0;
        int deltaY = 0;
        referenceElement = widgetEntry.getElement();
        Style style = elem.getStyle();
        style.setLeft(startX, Unit.PX);
        style.setTop(startY, Unit.PX);
        // Measure our own absolute geometry at the staging position.
        int myRight = elem.getAbsoluteRight();
        int myTop = elem.getAbsoluteTop();
        int refLeft = referenceElement.getAbsoluteLeft();
        int refTop = referenceElement.getAbsoluteTop();
        // Align our right edge with the reference entry's left edge.
        int newLeft = startX + (refLeft - myRight) + deltaX;
        int newTop;
        if (openAbove(referenceElement)) {
            // Align bottoms when opening upwards.
            int myHeight = elem.getOffsetHeight();
            int refHeight = referenceElement.getOffsetHeight();
            newTop = startY + ((refTop + refHeight) - (myTop + myHeight)) + deltaY;
        } else {
            // Align tops when opening downwards.
            newTop = startY + (refTop - myTop) + deltaY;
        }
        style.setLeft(newLeft, Unit.PX);
        style.setTop(newTop, Unit.PX);
    }
}
public class FileServersInner { /** * Gets information about a File Server .
* @ param resourceGroupName Name of the resource group to which the resource belongs .
* @ param workspaceName The name of the workspace . Workspace names can only contain a combination of alphanumeric characters along with dash ( - ) and underscore ( _ ) . The name must be from 1 through 64 characters long .
* @ param fileServerName The name of the file server within the specified resource group . File server names can only contain a combination of alphanumeric characters along with dash ( - ) and underscore ( _ ) . The name must be from 1 through 64 characters long .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the FileServerInner object if successful . */
public FileServerInner get ( String resourceGroupName , String workspaceName , String fileServerName ) { } } | return getWithServiceResponseAsync ( resourceGroupName , workspaceName , fileServerName ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class CalculatorFacade { /** * Sets the flag whether the measurement should collect each individual
* terms with their Term Ferquency and Invers Document Frequency scores .
* @ param collectTfIdfTerms
* The TF - IDF collector flag */
public void collectTfIdfTerms ( boolean collectTfIdfTerms ) { } } | if ( this . collectTfIdfTerms != collectTfIdfTerms ) { this . collectTfIdfTerms = collectTfIdfTerms ; changed = true ; if ( tfidfCalculator != null ) { tfidfCalculator . enableTermCollection ( collectTfIdfTerms ) ; } } |
public class AbstractDescribableScriptPlugin { /** * Looks for properties with content conversion , and converts the values
* @ param data map of values for config properties
* @ param context execution context
* @ param pluginProperties definition of plugin properties */
protected void loadContentConversionPropertyValues ( final Map < String , String > data , final ExecutionContext context , final List < Property > pluginProperties ) throws ConfigurationException { } } | // look for " valueConversion " properties
for ( Property property : pluginProperties ) { String name = property . getName ( ) ; String propValue = data . get ( name ) ; if ( null == propValue ) { continue ; } Map < String , Object > renderingOptions = property . getRenderingOptions ( ) ; if ( renderingOptions != null ) { Object conversion = renderingOptions . get ( StringRenderingConstants . VALUE_CONVERSION_KEY ) ; if ( StringRenderingConstants . ValueConversion . STORAGE_PATH_AUTOMATIC_READ . equalsOrString ( conversion ) ) { convertStoragePathValue ( data , context . getStorageTree ( ) , name , propValue , renderingOptions ) ; } else if ( StringRenderingConstants . ValueConversion . PRIVATE_DATA_CONTEXT . equalsOrString ( conversion ) ) { convertPrivateDataValue ( data , context . getPrivateDataContextObject ( ) , name , propValue , renderingOptions ) ; } } } |
public class ServerStats { /** * mutators */
synchronized void updateLatency ( long requestCreateTime ) { } } | long latency = System . currentTimeMillis ( ) - requestCreateTime ; totalLatency += latency ; count ++ ; if ( latency < minLatency ) { minLatency = latency ; } if ( latency > maxLatency ) { maxLatency = latency ; } |
public class LongPollingMessagingDelegate { /** * Opens a channel for long polling .
* @ param ccid
* the ID of the channel
* @ param atmosphereTrackingId
* the tracking ID
* @ return
* @ throws JoynrHttpException
* if no channel with the given ID was found ( e . g . because it
* wasn ' t created before ) or if the tracking ID wasn ' t set */
public Broadcastable openChannel ( String ccid , String atmosphereTrackingId ) { } } | throwExceptionIfTrackingIdnotSet ( atmosphereTrackingId ) ; log . debug ( "GET Channels open long poll channelId: {} trackingId: {}" , ccid , atmosphereTrackingId ) ; // NOTE : as of Atmosphere 0.8.5 : even though the parameter is set
// not to create the broadcaster if not
// found , if the
// broadcaster is found , but set to " destroyed " then it is recreated
// TODO when is a broadcaster " destroyed " ? ? ?
Broadcaster broadcaster = BroadcasterFactory . getDefault ( ) . lookup ( BounceProxyBroadcaster . class , ccid , false ) ; if ( broadcaster == null ) { log . error ( "no broadcaster registered for channel {}" , ccid ) ; // broadcaster not found for given ccid
throw new JoynrHttpException ( Status . BAD_REQUEST , JOYNRMESSAGINGERROR_CHANNELNOTFOUND ) ; } // this causes the long poll , or immediate response if elements are
// in the cache
return new Broadcastable ( broadcaster ) ; |
public class Dispatching { /** * Adapts a proposition to a supplier .
* @ param adaptee the proposition to be adapted
* @ return the adapted supplier */
public static Supplier < Boolean > supplier ( BooleanSupplier adaptee ) { } } | dbc . precondition ( adaptee != null , "cannot adapt a null boolean supplier" ) ; return ( ) -> adaptee . getAsBoolean ( ) ; |
public class AVUser { /** * 通过这个方法可以将AVUser对象强转为其子类对象 */
public static < T extends AVUser > T cast ( AVUser user , Class < T > clazz ) { } } | try { T newUser = AVObject . cast ( user , clazz ) ; return newUser ; } catch ( Exception e ) { } return null ; |
public class ResourceUtilsCore { /** * Extract resource group from a resource ID string .
* @ param id the resource ID string
* @ return the resource group name */
public static String groupFromResourceId ( String id ) { } } | return ( id != null ) ? ResourceId . fromString ( id ) . resourceGroupName ( ) : null ; |
public class ContextMap { /** * { @ inheritDoc } */
@ Override public boolean containsValue ( Object value ) { } } | if ( value != null ) { for ( Property property : getProperties ( _scope ) ) { if ( value . equals ( property . getValue ( ) ) ) { return true ; } } } return false ; |
public class BugInstance { /** * Add source line annotation for given Location in a method .
* @ param methodDescriptor
* the method
* @ param location
* the Location in the method
* @ return this BugInstance */
@ Nonnull public BugInstance addSourceLine ( MethodDescriptor methodDescriptor , Location location ) { } } | try { IAnalysisCache analysisCache = Global . getAnalysisCache ( ) ; ClassContext classContext = analysisCache . getClassAnalysis ( ClassContext . class , methodDescriptor . getClassDescriptor ( ) ) ; Method method = analysisCache . getMethodAnalysis ( Method . class , methodDescriptor ) ; return addSourceLine ( classContext , method , location ) ; } catch ( CheckedAnalysisException e ) { return addSourceLine ( SourceLineAnnotation . createReallyUnknown ( methodDescriptor . getClassDescriptor ( ) . toDottedClassName ( ) ) ) ; } |
public class AnnotationLookup { /** * Returns a CoreAnnotation class key for the given old - style FeatureLabel
* key if one exists ; null otherwise . */
public static KeyLookup getCoreKey ( String oldKey ) { } } | for ( KeyLookup lookup : KeyLookup . values ( ) ) { if ( lookup . oldKey . equals ( oldKey ) ) { return lookup ; } } return null ; |
public class JsonPointer { /** * Helper method to split up the path into individual components ( tokens ) .
* @ param path the path split up into tokens subsequently . */
private void parseComponents ( final String path ) { } } | String [ ] tokens = path . split ( "/" ) ; if ( tokens . length > MAX_NESTING_LEVEL ) { throw new IllegalArgumentException ( "Provided path contains too many levels of nesting!" ) ; } // replace ~ 1 and ~ 0
for ( int i = 0 ; i < tokens . length ; i ++ ) { tokens [ i ] = tokens [ i ] . replace ( "~1" , "/" ) . replace ( "~0" , "~" ) ; } this . refTokens . addAll ( Arrays . asList ( tokens ) ) ; |
public class ContainersInner { /** * Executes a command in a specific container instance .
* Executes a command for a specific container instance in a specified resource group and container group .
* @ param resourceGroupName The name of the resource group .
* @ param containerGroupName The name of the container group .
* @ param containerName The name of the container instance .
* @ param containerExecRequest The request for the exec command .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the ContainerExecResponseInner object */
public Observable < ContainerExecResponseInner > executeCommandAsync ( String resourceGroupName , String containerGroupName , String containerName , ContainerExecRequest containerExecRequest ) { } } | return executeCommandWithServiceResponseAsync ( resourceGroupName , containerGroupName , containerName , containerExecRequest ) . map ( new Func1 < ServiceResponse < ContainerExecResponseInner > , ContainerExecResponseInner > ( ) { @ Override public ContainerExecResponseInner call ( ServiceResponse < ContainerExecResponseInner > response ) { return response . body ( ) ; } } ) ; |
public class IOUtils { /** * Creates the directory named by the trailing filename of this file , including the complete directory path required
* to create this directory . < br / >
* < br / >
* < ul >
* < strong > Attentions : < / strong >
* < li > makeDirs ( " C : \ \ Users \ \ Trinea " ) can only create users folder < / li >
* < li > makeFolder ( " C : \ \ Users \ \ Trinea \ \ " ) can create Trinea folder < / li >
* < / ul >
* @ param filePath
* @ return true if the necessary directories have been created or the target directory already exists , false one of
* the directories can not be created . */
public static boolean makeDirs ( String filePath ) { } } | String folderName = getFolderName ( filePath ) ; if ( StringUtils . isEmpty ( folderName ) ) { return false ; } File folder = new File ( folderName ) ; return ( folder . exists ( ) && folder . isDirectory ( ) ) || folder . mkdirs ( ) ; |
public class DateProperty { /** * Resets the VTIMEZONE associated with the property . If utc is true , any TZID parameters are removed and the Java
* timezone is updated to UTC time . If utc is false , TZID parameters are removed and the Java timezone is set to the
* default timezone ( i . e . represents a " floating " local time )
* @ param utc a UTC value */
public final void setUtc ( final boolean utc ) { } } | if ( getDate ( ) != null && ( getDate ( ) instanceof DateTime ) ) { ( ( DateTime ) getDate ( ) ) . setUtc ( utc ) ; } getParameters ( ) . remove ( getParameter ( Parameter . TZID ) ) ; |
public class br_configuresyslog { /** * < pre >
* Use this operation to configure Syslog Server settings on Repeater Instances in bulk .
* < / pre > */
public static br_configuresyslog [ ] configuresyslog ( nitro_service client , br_configuresyslog [ ] resources ) throws Exception { } } | if ( resources == null ) throw new Exception ( "Null resource array" ) ; if ( resources . length == 1 ) return ( ( br_configuresyslog [ ] ) resources [ 0 ] . perform_operation ( client , "configuresyslog" ) ) ; return ( ( br_configuresyslog [ ] ) perform_operation_bulk_request ( client , resources , "configuresyslog" ) ) ; |
public class PoolableObject { /** * Initializes this Object when initially created by the Pool for allocation
* @ param < K > The Key wrapped Type
* @ param < E > the Entry Type
* @ param key The Key which is associated with this Pool Object
* @ param pool the pool creating this allocation
* @ return Poolable Object */
@ SuppressWarnings ( "unchecked" ) < K , E extends PoolableObject < V > > E initialize ( PoolKey < K > key , IKeyedObjectPool < ? , V > pool ) { } } | this . key = key ; this . pool = pool ; return ( E ) this ; |
public class X509CRLImpl { /** * Gets the DER encoded CRL information , the
* < code > tbsCertList < / code > from this CRL .
* This can be used to verify the signature independently .
* @ return the DER encoded CRL information .
* @ exception CRLException on encoding errors . */
public byte [ ] getTBSCertList ( ) throws CRLException { } } | if ( tbsCertList == null ) throw new CRLException ( "Uninitialized CRL" ) ; byte [ ] dup = new byte [ tbsCertList . length ] ; System . arraycopy ( tbsCertList , 0 , dup , 0 , dup . length ) ; return dup ; |
public class MachineTime { /** * / * [ deutsch ]
* < p > Wandelt diese maschinelle Dauer in einen dezimalen Sekundenbetrag um . < / p >
* @ return BigDecimal */
public BigDecimal toBigDecimal ( ) { } } | StringBuilder sb = new StringBuilder ( ) ; this . createNumber ( sb ) ; return new BigDecimal ( sb . toString ( ) ) ; |
public class BasketOption { /** * This method returns the value random variable of the product within the specified model , evaluated at a given evalutationTime .
* Note : For a lattice this is often the value conditional to evalutationTime , for a Monte - Carlo simulation this is the ( sum of ) value discounted to evaluation time .
* Cashflows prior evaluationTime are not considered .
* @ param evaluationTime The time on which this products value should be observed .
* @ param model The model used to price the product .
* @ return The random variable representing the value of the product discounted to evaluation time
* @ throws net . finmath . exception . CalculationException Thrown if the valuation fails , specific cause may be available via the < code > cause ( ) < / code > method . */
@ Override public RandomVariableInterface getValue ( double evaluationTime , AssetModelMonteCarloSimulationInterface model ) throws CalculationException { } } | // Get underlying and numeraire
RandomVariableInterface values = model . getRandomVariableForConstant ( 0.0 ) ; for ( int underlyingIndex = 0 ; underlyingIndex < weights . length ; underlyingIndex ++ ) { // Get S _ { i } ( T )
RandomVariableInterface underlyingAtMaturity = model . getAssetValue ( maturity , underlyingIndex ) ; values = values . addProduct ( underlyingAtMaturity , weights [ underlyingIndex ] ) ; } // Apply optionality
values = values . sub ( strike ) . floor ( 0.0 ) ; // Discounting . . .
RandomVariableInterface numeraireAtMaturity = model . getNumeraire ( maturity ) ; RandomVariableInterface monteCarloWeights = model . getMonteCarloWeights ( maturity ) ; values = values . div ( numeraireAtMaturity ) . mult ( monteCarloWeights ) ; // . . . to evaluation time .
RandomVariableInterface numeraireAtEvalTime = model . getNumeraire ( evaluationTime ) ; RandomVariableInterface monteCarloProbabilitiesAtEvalTime = model . getMonteCarloWeights ( evaluationTime ) ; values = values . mult ( numeraireAtEvalTime ) . div ( monteCarloProbabilitiesAtEvalTime ) ; return values ; |
public class CmsRfsFileViewer { /** * Set the path in the real file system that points to the folder / tree
* containing the log files . < p >
* This method will only success if the folder specified by the < code > path < / code >
* argument is valid within the file system . < p >
* @ param path the path in the real file system that points to the folder containing the log files
* @ throws CmsRuntimeException if the configuration of this instance has been frozen
* @ throws CmsRfsException if the given path is invalid */
public void setRootPath ( String path ) throws CmsRfsException , CmsRuntimeException { } } | checkFrozen ( ) ; if ( path != null ) { // leading whitespace from CmsComboWidget causes exception
path = path . trim ( ) ; } if ( CmsStringUtil . isEmpty ( path ) ) { throw new CmsRfsException ( Messages . get ( ) . container ( Messages . ERR_FILE_ARG_EMPTY_1 , new Object [ ] { String . valueOf ( path ) } ) ) ; } try { // just for validation :
File file = new File ( path ) ; if ( file . exists ( ) ) { m_rootPath = file . getCanonicalPath ( ) ; } else { // if wrong configuration perform self healing :
if ( OpenCms . getRunLevel ( ) == OpenCms . RUNLEVEL_2_INITIALIZING ) { // this deletes the illegal entry
m_rootPath = new File ( OpenCms . getSystemInfo ( ) . getLogFileRfsPath ( ) ) . getParent ( ) ; } else { throw new CmsRfsException ( Messages . get ( ) . container ( Messages . ERR_FILE_ARG_NOT_FOUND_1 , new Object [ ] { String . valueOf ( path ) } ) ) ; } } } catch ( IOException ioex ) { // if wrong configuration perform self healing :
if ( OpenCms . getRunLevel ( ) == OpenCms . RUNLEVEL_2_INITIALIZING ) { // this deletes the illegal entry and will default to the log file path
m_rootPath = new File ( OpenCms . getSystemInfo ( ) . getLogFileRfsPath ( ) ) . getParent ( ) ; } else { throw new CmsRfsException ( Messages . get ( ) . container ( Messages . ERR_FILE_ARG_ACCESS_1 , new Object [ ] { String . valueOf ( path ) } ) , ioex ) ; } } |
public class ServerState { /** * Gets the RMI < code > Registry < / code > to register the server with , creating
* it if necessary .
* @ return The RMI < code > Registry < / code > to register the server with .
* @ throws RemoteException If an error occurs while attempting to create
* the < code > Registry < / code > . */
public synchronized Registry getRegistry ( ) throws RemoteException { } } | if ( registry == null ) { registry = LocateRegistry . createRegistry ( JdcpUtil . DEFAULT_PORT ) ; } return registry ; |
public class SSD1306MessageFactory { /** * Sets vertical scroll area .
* For 64 - row display :
* < ul >
* < li > fixedRows = 0 , scrolledRows : whole area scrolls < / li >
* < li > fixedRows = 0 , scrolledRows & lt ; 64 : top area scrolls < / li >
* < li > fixedRows + scrolledRows & lt ; 64 : central area scrolls < / li >
* < li > fixedRows + scrolledRows = 64 : bottom area scrolls < / li >
* < / ul >
* Note that :
* < ul >
* < li > fixedRows + scrolledRows & lt ; = { @ link # setMultiplexRatio } < / li >
* < li > scrolledRows & lt ; = { @ link # setMultiplexRatio } < / li >
* < li > verticalOffset from { @ link # setVerticalAndHorizontalScroll } & lt ; = scrolledRows < / li >
* < li > { @ link # setDisplayStartLine } & lt ; = scrolledRows < / li >
* < / ul >
* @ param fixedRows No . of rows in top fixed area . It is referenced to the
* top of the GDDRAM .
* @ param scrolledRows No . of rows in scroll area . This is the number of
* rows to be used for vertical scrolling . The scroll area starts in the
* first row below the top fixed area .
* @ return command sequence */
public static byte [ ] setVerticalScrollArea ( byte fixedRows , byte scrolledRows ) { } } | if ( fixedRows < 0 || fixedRows > 63 ) { throw new IllegalArgumentException ( "Fixed rows must be between 0 and 63." ) ; } if ( scrolledRows < 0 || scrolledRows > 127 ) { throw new IllegalArgumentException ( "Scrolled rows must be between 0 and 127." ) ; } return new byte [ ] { COMMAND_CONTROL_BYTE , SET_VERTICAL_SCROLL_AREA , COMMAND_CONTROL_BYTE , fixedRows , COMMAND_CONTROL_BYTE , scrolledRows } ; |
public class DescribeClientVpnEndpointsResult { /** * Information about the Client VPN endpoints .
* @ param clientVpnEndpoints
* Information about the Client VPN endpoints . */
public void setClientVpnEndpoints ( java . util . Collection < ClientVpnEndpoint > clientVpnEndpoints ) { } } | if ( clientVpnEndpoints == null ) { this . clientVpnEndpoints = null ; return ; } this . clientVpnEndpoints = new com . amazonaws . internal . SdkInternalList < ClientVpnEndpoint > ( clientVpnEndpoints ) ; |
public class ColVals { /** * < p > Put column with Long val . < / p >
* @ param pNm column name
* @ param pVal column val */
public final void put ( final String pNm , final Long pVal ) { } } | if ( isId ( pNm ) ) { if ( this . idLongs == null ) { this . idLongs = new HashMap < String , Long > ( ) ; } this . idLongs . put ( pNm , pVal ) ; } else { if ( this . longs == null ) { this . longs = new HashMap < String , Long > ( ) ; } this . longs . put ( pNm , pVal ) ; } |
public class VisLmlSyntax { /** * Spinner attributes . */
protected void registerSpinnerAttributes ( ) { } } | addAttributeProcessor ( new SpinnerArrayLmlAttribute ( ) , "items" ) ; addAttributeProcessor ( new SpinnerDisabledLmlAttribute ( ) , "disabled" , "inputDisabled" ) ; addAttributeProcessor ( new SpinnerNameLmlAttribute ( ) , "selectorName" , "text" ) ; addAttributeProcessor ( new SpinnerPrecisionLmlAttribute ( ) , "precision" , "scale" ) ; addAttributeProcessor ( new SpinnerProgrammaticChangeEventsLmlAttribute ( ) , "programmaticChangeEvents" ) ; addAttributeProcessor ( new SpinnerSelectedLmlAttribute ( ) , "selected" ) ; addAttributeProcessor ( new SpinnerTextFieldEventPolicyLmlAttribute ( ) , "textFieldEventPolicy" ) ; addAttributeProcessor ( new SpinnerWrapLmlAttribute ( ) , "wrap" ) ; |
public class PersonName { /** * 获取所有的姓
* @ return 有序列表 */
public static List < String > getSurnames ( ) { } } | List < String > result = new ArrayList < > ( ) ; result . addAll ( SURNAME_1 ) ; result . addAll ( SURNAME_2 ) ; Collections . sort ( result ) ; return result ; |
public class AlpineParser { /** * / * - - - Overridden methods - - - */
@ Override public Collection < DependencyInfo > parse ( File file ) { } } | BufferedReader br = null ; FileReader fr = null ; Collection < DependencyInfo > dependencyInfos = new LinkedList < > ( ) ; try { fr = new FileReader ( file . getAbsoluteFile ( ) ) ; br = new BufferedReader ( fr ) ; String line = null ; Package packageInfo = new Package ( ) ; // Create Alpine package - package - version - architecture . apk
while ( ( line = br . readLine ( ) ) != null ) { if ( ! line . isEmpty ( ) ) { if ( packageInfo . getPackageName ( ) == null || packageInfo . getVersion ( ) == null || packageInfo . getArchitecture ( ) == null ) { String [ ] lineSplit = line . split ( Constants . COLON ) ; String dependencyParameter = lineSplit [ 1 ] . trim ( ) ; switch ( lineSplit [ 0 ] ) { case PACKAGE : packageInfo . setPackageName ( dependencyParameter ) ; break ; case VERSION : packageInfo . setVersion ( dependencyParameter ) ; break ; case ARCHITECTURE : packageInfo . setArchitecture ( dependencyParameter ) ; break ; default : break ; } } } else { DependencyInfo dependencyInfo = createDependencyInfo ( packageInfo ) ; packageInfo = new Package ( ) ; dependencyInfos . add ( dependencyInfo ) ; } } } catch ( Exception e ) { logger . error ( e . getMessage ( ) ) ; logger . debug ( "{}" , e . getStackTrace ( ) ) ; } finally { closeStream ( br , fr ) ; } return dependencyInfos ; |
public class StorageAccountsInner { /** * Updates the specified Data Lake Analytics account to remove an Azure Storage account .
* @ param resourceGroupName The name of the Azure resource group .
* @ param accountName The name of the Data Lake Analytics account .
* @ param storageAccountName The name of the Azure Storage account to remove
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */
public void delete ( String resourceGroupName , String accountName , String storageAccountName ) { } } | deleteWithServiceResponseAsync ( resourceGroupName , accountName , storageAccountName ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class Trigram { /** * 计算分词结果的三元模型分值
* @ param words 分词结果
* @ return 三元模型分值 */
public static float trigram ( List < Word > words ) { } } | if ( words . size ( ) > 2 ) { float score = 0 ; for ( int i = 0 ; i < words . size ( ) - 2 ; i ++ ) { score += Trigram . getScore ( words . get ( i ) . getText ( ) , words . get ( i + 1 ) . getText ( ) , words . get ( i + 2 ) . getText ( ) ) ; } return score ; } return 0 ; |
public class DefaultGroovyMethods { /** * Extend class globally with category methods .
* @ param self any Class
* @ param categoryClass a category class to use
* @ since 1.6.0 */
public static void mixin ( Class self , Class categoryClass ) { } } | mixin ( getMetaClass ( self ) , Collections . singletonList ( categoryClass ) ) ; |
public class BoxEntity { /** * Gets the type of the entity .
* @ return the entity type . */
public String getType ( ) { } } | String type = getPropertyAsString ( FIELD_TYPE ) ; if ( type == null ) { return getPropertyAsString ( FIELD_ITEM_TYPE ) ; } return type ; |
public class Bits { /** * Writes 2 sequence numbers ( seqnos ) in compressed format to buf .
* The seqnos are non - negative and hr is guaranteed to be & gt ; = hd .
* Once variable - length encoding has been implemented , this method will probably get dropped as we can simply
* write the 2 longs individually .
* @ param hd the highest delivered seqno . Guaranteed to be a positive number
* @ param hr the highest received seqno . Guaranteed to be a positive number . Greater than or equal to hd
* @ param buf the buffer to write to */
public static void writeLongSequence ( long hd , long hr , ByteBuffer buf ) { } } | if ( hr < hd ) throw new IllegalArgumentException ( "hr (" + hr + ") has to be >= hd (" + hd + ")" ) ; if ( hd == 0 && hr == 0 ) { buf . put ( ( byte ) 0 ) ; return ; } long delta = hr - hd ; // encode highest _ delivered followed by delta
byte bytes_for_hd = bytesRequiredFor ( hd ) , bytes_for_delta = bytesRequiredFor ( delta ) ; byte bytes_needed = encodeLength ( bytes_for_hd , bytes_for_delta ) ; buf . put ( bytes_needed ) ; for ( int i = 0 ; i < bytes_for_hd ; i ++ ) buf . put ( getByteAt ( hd , i ) ) ; for ( int i = 0 ; i < bytes_for_delta ; i ++ ) buf . put ( getByteAt ( delta , i ) ) ; |
public class SoyFileSetParser { /** * Parses a set of Soy files , returning a structure containing the parse tree and template
* registry . */
private ParseResult parseWithVersions ( ) throws IOException { } } | List < TemplateMetadata > templateMetadatas = new ArrayList < > ( ) ; for ( CompilationUnitAndKind unit : compilationUnits ( ) ) { templateMetadatas . addAll ( TemplateMetadataSerializer . templatesFromCompilationUnit ( unit . compilationUnit ( ) , unit . fileKind ( ) , typeRegistry ( ) , unit . filePath ( ) , errorReporter ( ) ) ) ; } IdGenerator nodeIdGen = ( cache ( ) != null ) ? cache ( ) . getNodeIdGenerator ( ) : new IncrementingIdGenerator ( ) ; SoyFileSetNode soyTree = new SoyFileSetNode ( nodeIdGen . genId ( ) , nodeIdGen ) ; boolean filesWereSkipped = false ; // TODO ( lukes ) : there are other places in the compiler ( autoescaper ) which may use the id
// generator but fail to lock on it . Eliminate the id system to avoid this whole issue .
synchronized ( nodeIdGen ) { // Avoid using the same ID generator in multiple threads .
for ( SoyFileSupplier fileSupplier : soyFileSuppliers ( ) . values ( ) ) { SoyFileSupplier . Version version = fileSupplier . getVersion ( ) ; VersionedFile cachedFile = cache ( ) != null ? cache ( ) . get ( fileSupplier . getFilePath ( ) , version ) : null ; SoyFileNode node ; if ( cachedFile == null ) { node = parseSoyFileHelper ( fileSupplier , nodeIdGen ) ; // TODO ( b / 19269289 ) : implement error recovery and keep on trucking in order to display
// as many errors as possible . Currently , the later passes just spew NPEs if run on
// a malformed parse tree .
if ( node == null ) { filesWereSkipped = true ; continue ; } // Run passes that are considered part of initial parsing .
passManager ( ) . runSingleFilePasses ( node , nodeIdGen ) ; // Run passes that check the tree .
if ( cache ( ) != null ) { cache ( ) . put ( fileSupplier . getFilePath ( ) , VersionedFile . of ( node , version ) ) ; } } else { node = cachedFile . file ( ) ; } for ( TemplateNode template : node . getChildren ( ) ) { templateMetadatas . add ( TemplateMetadata . fromTemplate ( template ) ) ; } soyTree . addChild ( node ) ; } TemplateRegistry registry = new TemplateRegistry ( templateMetadatas , errorReporter ( ) ) ; // Run passes that check the tree iff we successfully parsed every file .
if ( ! filesWereSkipped ) { passManager ( ) . runWholeFilesetPasses ( soyTree , registry ) ; } return ParseResult . create ( soyTree , registry ) ; } |
public class PrimaveraXERFileReader { /** * Filters a list of rows from the named table . If a column name and a value
* are supplied , then use this to filter the rows . If no column name is
* supplied , then return all rows .
* @ param tableName table name
* @ param columnName filter column name
* @ param id filter column value
* @ return filtered list of rows */
private List < Row > getRows ( String tableName , String columnName , Integer id ) { } } | List < Row > result ; List < Row > table = m_tables . get ( tableName ) ; if ( table == null ) { result = Collections . < Row > emptyList ( ) ; } else { if ( columnName == null ) { result = table ; } else { result = new LinkedList < Row > ( ) ; for ( Row row : table ) { if ( NumberHelper . equals ( id , row . getInteger ( columnName ) ) ) { result . add ( row ) ; } } } } return result ; |
public class StringUtils { /** * Generates a digest ( hex string ) for the given string */
public static String digest ( String string , String digestAlgo , String encoding ) { } } | try { MessageDigest digester = MessageDigest . getInstance ( digestAlgo ) ; digester . update ( string . getBytes ( encoding ) ) ; return hex ( digester . digest ( ) ) ; } catch ( NoSuchAlgorithmException | UnsupportedEncodingException e ) { throw new RuntimeException ( e ) ; } |
public class StatsCollector { /** * Adds a tag to all the subsequent data points recorded .
* All subsequent calls to one of the { @ code record } methods will
* associate the tag given to this method with the data point .
* This method can be called multiple times to associate multiple tags
* with all the subsequent data points .
* @ param name The name of the tag .
* @ param value The value of the tag .
* @ throws IllegalArgumentException if the name or the value are empty
* or otherwise invalid .
* @ see # clearExtraTag */
public final void addExtraTag ( final String name , final String value ) { } } | if ( name . length ( ) <= 0 ) { throw new IllegalArgumentException ( "empty tag name, value=" + value ) ; } else if ( value . length ( ) <= 0 ) { throw new IllegalArgumentException ( "empty value, tag name=" + name ) ; } else if ( name . indexOf ( '=' ) != - 1 ) { throw new IllegalArgumentException ( "tag name contains `=': " + name + " (value = " + value + ')' ) ; } else if ( value . indexOf ( '=' ) != - 1 ) { throw new IllegalArgumentException ( "tag value contains `=': " + value + " (name = " + name + ')' ) ; } if ( extratags == null ) { extratags = new HashMap < String , String > ( ) ; } extratags . put ( name , value ) ; |
public class TangoEventsAdapter { public void addTangoAttConfigListener ( ITangoAttConfigListener listener , String attrName , String [ ] filters , boolean stateless ) throws DevFailed { } } | TangoAttConfig tangoAttConfig ; String key = deviceName + "/" + attrName ; if ( ( tangoAttConfig = tango_att_config_source . get ( key ) ) == null ) { tangoAttConfig = new TangoAttConfig ( deviceProxy , attrName , filters ) ; tango_att_config_source . put ( key , tangoAttConfig ) ; } synchronized ( moni ) { tangoAttConfig . addTangoAttConfigListener ( listener , stateless ) ; } |
public class OsHelpers { /** * When using / bin / sh - c " . . . " there is a single argument to the process . This method builds it . < br >
* Note we encourange users through the GUI to only specify the whole shell command in a single field . Only XML imports may result in
* multiple arguments .
* @ param resultList
* @ param ji */
private static void addAllParametersAsSingleString ( List < String > resultList , String commandLine , Map < String , String > prms ) { } } | List < String > raw = new ArrayList < > ( prms . size ( ) * 2 ) ; // Command itself
raw . add ( commandLine ) ; // Parameters , ordered by key
List < String > keys = new ArrayList < > ( prms . keySet ( ) ) ; Collections . sort ( keys , prmComparator ) ; for ( String p : keys ) { if ( ! prms . get ( p ) . trim ( ) . isEmpty ( ) ) { raw . add ( prms . get ( p ) . trim ( ) ) ; } } if ( ! raw . isEmpty ( ) ) { resultList . add ( StringUtils . join ( raw , " " ) ) ; } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.