signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class StartImportRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( StartImportRequest startImportRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( startImportRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( startImportRequest . getPayload ( ) , PAYLOAD_BINDING ) ; protocolMarshaller . marshall ( startImportRequest . getResourceType ( ) , RESOURCETYPE_BINDING ) ; protocolMarshaller . marshall ( startImportRequest . getMergeStrategy ( ) , MERGESTRATEGY_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class SubscribedTrackUpdater { /** * Add the requested post parameters to the Request . * @ param request Request to add post params to */ private void addPostParams ( final Request request ) { } }
if ( track != null ) { request . addPostParam ( "Track" , track . toString ( ) ) ; } if ( publisher != null ) { request . addPostParam ( "Publisher" , publisher . toString ( ) ) ; } if ( kind != null ) { request . addPostParam ( "Kind" , kind . toString ( ) ) ; } if ( status != null ) { request . addPostParam ( "Status" , status . toString ( ) ) ; }
public class ConfigUtils { /** * Returns the specified String property from the configuration * @ param config the configuration * @ param key the key of the property * @ return the String value of the property , or null if not found * @ throws DeployerConfigurationException if an error occurred */ public static String getStringProperty ( Configuration config , String key ) throws DeployerConfigurationException { } }
return getStringProperty ( config , key , null ) ;
public class DfsServlet { /** * Create a { @ link NameNode } proxy from the current { @ link ServletContext } . */ protected synchronized ClientProtocol createNameNodeProxy ( UnixUserGroupInformation ugi ) throws IOException { } }
if ( nnProxy != null ) { return nnProxy ; } ServletContext context = getServletContext ( ) ; InetSocketAddress nnAddr = ( InetSocketAddress ) context . getAttribute ( "name.node.address" ) ; if ( nnAddr == null ) { throw new IOException ( "The namenode is not out of safemode yet" ) ; } Configuration conf = new Configuration ( ( Configuration ) context . getAttribute ( "name.conf" ) ) ; UnixUserGroupInformation . saveToConf ( conf , UnixUserGroupInformation . UGI_PROPERTY_NAME , ugi ) ; nnProxy = DFSClient . createNamenode ( nnAddr , conf ) ; return nnProxy ;
public class MoreStringUtils { /** * Returns a { @ link File } from the given filename . * Ensures that the incoming filename has backslashes converted to forward slashes ( on Unix OS ) , * and vice - versa on Windows OS , otherwise , the path separation methods of { @ link File } will not * work . * @ param parent * parent file name * @ param filename * filename to be used * @ return a { @ link File } from the given filename . */ public static File asFile ( final File parent , final String filename ) { } }
final String normalized = normalizeSeparators ( filename ) ; return normalized != null ? new File ( parent , normalized ) : parent ;
public class SizeBoundedQueue { /** * Removes and adds to collection c as many elements as possible ( including waiters ) but not exceeding max _ bytes . * E . g . if we have elements { 1000b , 2000b , 4000b } and max _ bytes = 6000 , then only the first 2 elements are removed * and the new size is 4000 * @ param c The collection to transfer the removed elements to * @ param max _ bytes The max number of bytes to remove * @ return The accumulated number of bytes of all removed elements */ public int drainTo ( Collection < T > c , final int max_bytes ) { } }
if ( c == null ) throw new IllegalArgumentException ( "collection to drain elements to must not be null" ) ; if ( max_bytes <= 0 ) return 0 ; int bytes = 0 ; El < T > el ; boolean at_least_one_removed = false ; lock . lock ( ) ; try { // go as long as there are elements in the queue or pending waiters while ( ( el = queue . peek ( ) ) != null || waiters > 0 ) { if ( el != null ) { if ( bytes + el . size > max_bytes ) break ; el = queue . poll ( ) ; at_least_one_removed = true ; count -= el . size ; bytes += el . size ; c . add ( el . el ) ; } else { // queue is empty , wait on more elements to be added not_full . signalAll ( ) ; // releases the waiters , causing them to add their elements to the queue try { not_empty . await ( ) ; } catch ( InterruptedException e ) { break ; } } } if ( at_least_one_removed ) not_full . signalAll ( ) ; return bytes ; } finally { lock . unlock ( ) ; }
public class ListBackupSelectionsResult { /** * An array of backup selection list items containing metadata about each resource in the list . * @ param backupSelectionsList * An array of backup selection list items containing metadata about each resource in the list . */ public void setBackupSelectionsList ( java . util . Collection < BackupSelectionsListMember > backupSelectionsList ) { } }
if ( backupSelectionsList == null ) { this . backupSelectionsList = null ; return ; } this . backupSelectionsList = new java . util . ArrayList < BackupSelectionsListMember > ( backupSelectionsList ) ;
public class DemoController { /** * Personalized accounts page . Content is private to the user * so it should only be stored in a private cache . */ @ CacheControl ( policy = { } }
CachePolicy . PRIVATE , CachePolicy . MUST_REVALIDATE } ) @ RequestMapping ( "/account.do" ) public String handleAccountRequest ( Model model ) { model . addAttribute ( "pageName" , "Your Account" ) ; return "page" ;
public class POIUtils { /** * シートの最大列数を取得する 。 * < p > { @ literal jxl . Sheet . getColumns ( ) } < / p > * @ param sheet シートオブジェクト * @ return 最大列数 * @ throws IllegalArgumentException { @ literal sheet = = null . } */ public static int getColumns ( final Sheet sheet ) { } }
ArgUtils . notNull ( sheet , "sheet" ) ; int minRowIndex = sheet . getFirstRowNum ( ) ; int maxRowIndex = sheet . getLastRowNum ( ) ; int maxColumnsIndex = 0 ; for ( int i = minRowIndex ; i <= maxRowIndex ; i ++ ) { final Row row = sheet . getRow ( i ) ; if ( row == null ) { continue ; } final int column = row . getLastCellNum ( ) ; if ( column > maxColumnsIndex ) { maxColumnsIndex = column ; } } return maxColumnsIndex ;
public class AbstractBigtableTable { /** * { @ inheritDoc } */ @ Override public Result increment ( Increment increment ) throws IOException { } }
LOG . trace ( "increment(Increment)" ) ; Span span = TRACER . spanBuilder ( "BigtableTable.increment" ) . startSpan ( ) ; try ( Scope scope = TRACER . withSpan ( span ) ) { ReadModifyWriteRow request = hbaseAdapter . adapt ( increment ) ; return Adapters . ROW_ADAPTER . adaptResponse ( clientWrapper . readModifyWriteRow ( request ) ) ; } catch ( Throwable t ) { span . setStatus ( Status . UNKNOWN ) ; throw logAndCreateIOException ( "increment" , increment . getRow ( ) , t ) ; } finally { span . end ( ) ; }
public class LogRef { /** * * Log a debug level message . * * * @ param msg * Log message * * @ param sr * The < code > ServiceReference < / code > of the service * that this * message is associated with . */ public void debug ( String msg , ServiceReference sr ) { } }
doLog ( msg , LOG_DEBUG , sr , null ) ;
public class JavaHelper { /** * Loads a class of the given name from the project class loader or returns null if its not found */ public static Class < ? > loadProjectClass ( Project project , String className ) { } }
URLClassLoader classLoader = getProjectClassLoader ( project ) ; if ( classLoader != null ) { try { return classLoader . loadClass ( className ) ; } catch ( ClassNotFoundException e ) { // ignore } finally { try { classLoader . close ( ) ; } catch ( IOException e ) { // ignore } } } return null ;
public class JSON { /** * / * Mutant factories */ public JSON with ( TokenStreamFactory f ) { } }
if ( f == _streamFactory ) { return this ; } return _with ( _features , f , _treeCodec , _reader , _writer , _prettyPrinter ) ;
public class IR { public static Node sheq ( Node expr1 , Node expr2 ) { } }
return binaryOp ( Token . SHEQ , expr1 , expr2 ) ;
public class ScriptableProcessor { /** * Draws the page to an image file and marks selected areas in the image . * @ param path The path to the destination image file . * @ param areaNames A substring of the names of areas that should be marked in the image . When set to { @ code null } , all the areas are marked . */ public void drawToImageWithAreas ( String path , String areaNames ) { } }
try { ImageOutputDisplay disp = new ImageOutputDisplay ( getPage ( ) . getWidth ( ) , getPage ( ) . getHeight ( ) ) ; disp . drawPage ( getPage ( ) ) ; showAreas ( disp , getAreaTree ( ) . getRoot ( ) , areaNames ) ; disp . saveTo ( path ) ; } catch ( IOException e ) { log . error ( "Couldn't write to " + path + ": " + e . getMessage ( ) ) ; }
public class SimpleMerger { /** * Merges the < em > source < / em > element ( and its " downstream " dependents ) * into < em > target < / em > model . * @ see # merge ( Model , Collection ) * @ param target the BioPAX model to merge into * @ param source object to add or merge */ public void merge ( Model target , BioPAXElement source ) { } }
merge ( target , Collections . singleton ( source ) ) ;
public class UTF8Reader { /** * Skip characters . This method will block until some characters are * available , an I / O error occurs , or the end of the stream is reached . * @ param n The number of characters to skip * @ return The number of characters actually skipped * @ exception IOException If an I / O error occurs */ public long skip ( long n ) throws IOException { } }
long remaining = n ; final char [ ] ch = new char [ fBuffer . length ] ; do { int length = ch . length < remaining ? ch . length : ( int ) remaining ; int count = read ( ch , 0 , length ) ; if ( count > 0 ) { remaining -= count ; } else { break ; } } while ( remaining > 0 ) ; long skipped = n - remaining ; return skipped ;
public class PhysicalNamingStrategyShogunCore { /** * Converts table names to lower case and limits the length if necessary . */ @ Override public Identifier toPhysicalTableName ( Identifier tableIdentifier , JdbcEnvironment context ) { } }
return convertToLimitedLowerCase ( context , tableIdentifier , tablePrefix ) ;
public class CommerceOrderItemPersistenceImpl { /** * Returns the last commerce order item in the ordered set where CPInstanceId = & # 63 ; . * @ param CPInstanceId the cp instance ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching commerce order item , or < code > null < / code > if a matching commerce order item could not be found */ @ Override public CommerceOrderItem fetchByCPInstanceId_Last ( long CPInstanceId , OrderByComparator < CommerceOrderItem > orderByComparator ) { } }
int count = countByCPInstanceId ( CPInstanceId ) ; if ( count == 0 ) { return null ; } List < CommerceOrderItem > list = findByCPInstanceId ( CPInstanceId , count - 1 , count , orderByComparator ) ; if ( ! list . isEmpty ( ) ) { return list . get ( 0 ) ; } return null ;
public class MainPopupMenu { /** * ZAP : Added support for submenus */ private JMenu getSuperMenu ( String name , int index ) { } }
JMenu superMenu = superMenus . get ( name ) ; if ( superMenu == null ) { // Use an ExtensionPopupMenu so child menus are dismissed superMenu = new ExtensionPopupMenu ( name ) { private static final long serialVersionUID = 6825880451078204378L ; @ Override public boolean isEnableForComponent ( Component invoker ) { return true ; } } ; superMenus . put ( name , superMenu ) ; addMenuItem ( superMenu , index ) ; } return superMenu ;
public class FrustumIntersection { /** * Determine whether the given axis - aligned box is partly or completely within or outside of the frustum defined by < code > this < / code > frustum culler * and , if the box is not inside this frustum , return the index of the plane that culled it . * The box is specified via its < code > min < / code > and < code > max < / code > corner coordinates . * This method differs from { @ link # intersectAab ( Vector3fc , Vector3fc ) } in that * it allows to mask - off planes that should not be calculated . For example , in order to only test a box against the * left frustum plane , use a mask of { @ link # PLANE _ MASK _ NX } . Or in order to test all planes < i > except < / i > the left plane , use * a mask of < code > ( ~ 0 ^ PLANE _ MASK _ NX ) < / code > . * In addition , the < code > startPlane < / code > denotes the first frustum plane to test the box against . To use this effectively means to store the * plane that previously culled an axis - aligned box ( as returned by < code > intersectAab ( ) < / code > ) and in the next frame use the return value * as the argument to the < code > startPlane < / code > parameter of this method . The assumption is that the plane that culled the object previously will also * cull it now ( temporal coherency ) and the culling computation is likely reduced in that case . * The algorithm implemented by this method is conservative . This means that in certain circumstances a < i > false positive < / i > * can occur , when the method returns < code > - 1 < / code > for boxes that are actually not visible / do not intersect the frustum . * See < a href = " http : / / iquilezles . org / www / articles / frustumcorrect / frustumcorrect . htm " > iquilezles . org < / a > for an examination of this problem . 
* @ param min * the minimum corner coordinates of the axis - aligned box * @ param max * the maximum corner coordinates of the axis - aligned box * @ param mask * contains as bitset all the planes that should be tested . * This value can be any combination of * { @ link # PLANE _ MASK _ NX } , { @ link # PLANE _ MASK _ PX } , * { @ link # PLANE _ MASK _ NY } , { @ link # PLANE _ MASK _ PY } , * { @ link # PLANE _ MASK _ NZ } and { @ link # PLANE _ MASK _ PZ } * @ param startPlane * the first frustum plane to test the axis - aligned box against . It is one of * { @ link # PLANE _ NX } , { @ link # PLANE _ PX } , { @ link # PLANE _ NY } , { @ link # PLANE _ PY } , { @ link # PLANE _ NZ } and { @ link # PLANE _ PZ } * @ return the index of the first plane that culled the box , if the box does not intersect the frustum , * or { @ link # INTERSECT } if the box intersects the frustum , or { @ link # INSIDE } if the box is fully inside of the frustum . * The plane index is one of { @ link # PLANE _ NX } , { @ link # PLANE _ PX } , { @ link # PLANE _ NY } , { @ link # PLANE _ PY } , { @ link # PLANE _ NZ } and { @ link # PLANE _ PZ } */ public int intersectAab ( Vector3fc min , Vector3fc max , int mask , int startPlane ) { } }
return intersectAab ( min . x ( ) , min . y ( ) , min . z ( ) , max . x ( ) , max . y ( ) , max . z ( ) , mask , startPlane ) ;
public class br_disable { /** * < pre > * Use this operation to Disable Repeater Instances . * < / pre > */ public static br_disable disable ( nitro_service client , br_disable resource ) throws Exception { } }
return ( ( br_disable [ ] ) resource . perform_operation ( client , "disable" ) ) [ 0 ] ;
public class NetworkServiceRecordAgent { /** * Update a VNFRecordDependency . * @ param idNsr the ID of the NetworkServiceRecord containing the VNFRecordDependency * @ param idVnfrDep the ID of the VNFRecordDependency to update * @ param vnfRecordDependency the updated version of the VNFRecordDependency * @ return the updated VNFRecordDependency * @ throws SDKException if the request fails */ @ Help ( help = "Update the VirtualNetworkFunctionRecord Dependency of a NetworkServiceRecord with specific id" ) public VNFRecordDependency updateVNFDependency ( final String idNsr , final String idVnfrDep , final VNFRecordDependency vnfRecordDependency ) throws SDKException { } }
String url = idNsr + "/vnfdependencies" + "/" + idVnfrDep ; return ( VNFRecordDependency ) requestPut ( url , vnfRecordDependency ) ;
public class GrassRasterReader { /** * read the null value from the null file ( if it exists ) and returns the information about the * particular cell ( true if it is novalue , false if it is not a novalue * @ param currentfilerow * @ param currentfilecol * @ return */ private boolean readNullValueAtRowCol ( int currentfilerow , int currentfilecol ) throws IOException { } }
/* * If the null file doesn ' t exist and the map is an integer , than it is an old integer - map * format , where the novalues are the cells that contain the values 0 */ if ( nullFile != null ) { long byteperrow = ( long ) Math . ceil ( fileWindow . getCols ( ) / 8.0 ) ; // in the // null // map of // cell _ misc long currentByte = ( long ) Math . ceil ( ( currentfilecol + 1 ) / 8.0 ) ; // in the // null // map // currentfilerow starts from 0 , so it is the row before the one we // need long byteToRead = ( byteperrow * currentfilerow ) + currentByte ; nullFile . seek ( byteToRead - 1 ) ; int bitposition = ( currentfilecol ) % 8 ; byte [ ] thetmp = new byte [ 1 ] ; thetmp [ 0 ] = nullFile . readByte ( ) ; BitSet tmp = FileUtilities . fromByteArray ( thetmp ) ; boolean theBit = tmp . get ( 7 - bitposition ) ; /* * if ( theBit ) { System . out . println ( " 1 at position : " + ( 7 - bitposition ) + " due to * bitposition : " + bitposition ) ; } else { System . out . println ( " 0 at position : " + * ( 7 - bitposition ) + " due to bitposition : " + bitposition ) ; } */ return theBit ; } // else // / * There is no null file around * / // if ( rasterMapType > 0) // / / isOldIntegerMap = true ; // return false ; // else // / / throw some exception // return false ; return false ;
public class FeatureMate { /** * Tries to convert the internal geometry to a { @ link LineString } . * < p > This works only for Polygon and Lines features . * < p > From this moment on the internal geometry ( as got by the { @ link # getGeometry ( ) } ) * will be the line type . * < p > To get the original geometry one can simply call { @ link # resetGeometry ( ) } . * @ throws IllegalArgumentException in the case the geometry is a point . */ public void convertToLine ( ) throws IllegalArgumentException { } }
EGeometryType geometryType = EGeometryType . forGeometry ( getGeometry ( ) ) ; switch ( geometryType ) { case MULTIPOLYGON : case POLYGON : // convert to line Coordinate [ ] tmpCoords = geometry . getCoordinates ( ) ; geometry = GeometryUtilities . gf ( ) . createLineString ( tmpCoords ) ; // reset prepared geometry preparedGeometry = null ; break ; case LINESTRING : case MULTILINESTRING : // do nothing , is already line break ; default : throw new IllegalArgumentException ( "Points not supported" ) ; }
public class CommsInboundChain { /** * ( non - Javadoc ) * @ see com . ibm . wsspi . channelfw . ChainEventListener # chainStarted ( com . ibm . websphere . channelfw . ChainData ) */ @ Override public void chainStarted ( ChainData chainData ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "chainStarted" , chainData ) ; chainState . set ( ChainState . STARTED . val ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "chainStarted" ) ;
public class NetworkWatchersInner { /** * Updates a network watcher tags . * @ param resourceGroupName The name of the resource group . * @ param networkWatcherName The name of the network watcher . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the NetworkWatcherInner object */ public Observable < NetworkWatcherInner > updateTagsAsync ( String resourceGroupName , String networkWatcherName ) { } }
return updateTagsWithServiceResponseAsync ( resourceGroupName , networkWatcherName ) . map ( new Func1 < ServiceResponse < NetworkWatcherInner > , NetworkWatcherInner > ( ) { @ Override public NetworkWatcherInner call ( ServiceResponse < NetworkWatcherInner > response ) { return response . body ( ) ; } } ) ;
public class Crossover { /** * / * Xianshun says : ( From Section 5.7.3 of Linear Genetic Programming * Crossover requires , by definition , that information is exchanged between individual programs . * However , an exchange always includes two operations on an individual , the deletion and * the insertion of a subprogram . The imperative program representation allows instructions to be * deleted without replacement since instructon operands , e . g . register pointers , are always defined . * Instructions may also be inserted at any position without a preceding deletion , at least if the maximum * program length is not exceeded . * If we want linear crossover to be less disruptive it may be a good idea to execute only one operation per * individual . this consideration motivates a one - segment or one - way recombination of linear genetic * programs as described by Algorithm 5.3. * Standard linear crossover may also be refered to as two - segment recombinations , in these terms . */ private static void oneSegmentCrossover ( Program gp1 , Program gp2 , LGP manager , RandEngine randEngine ) { } }
double prob_r = randEngine . uniform ( ) ; if ( ( gp1 . length ( ) < manager . getMaxProgramLength ( ) ) && ( ( prob_r <= manager . getInsertionProbability ( ) || gp1 . length ( ) == manager . getMinProgramLength ( ) ) ) ) { int i1 = randEngine . nextInt ( gp1 . length ( ) ) ; int max_segment_length = gp2 . length ( ) < manager . getMaxSegmentLength ( ) ? gp2 . length ( ) : manager . getMaxSegmentLength ( ) ; int ls2 = 1 + randEngine . nextInt ( max_segment_length ) ; if ( gp1 . length ( ) + ls2 > manager . getMaxProgramLength ( ) ) { ls2 = manager . getMaxProgramLength ( ) - gp2 . length ( ) + 1 ; } if ( ls2 == gp2 . length ( ) ) { ls2 = gp2 . length ( ) - 1 ; } int i2 = randEngine . nextInt ( gp2 . length ( ) - ls2 ) ; List < Instruction > instructions1 = gp1 . getInstructions ( ) ; List < Instruction > instructions2 = gp2 . getInstructions ( ) ; List < Instruction > s = new ArrayList < > ( ) ; for ( int i = i2 ; i != ( i2 + ls2 ) ; ++ i ) { Instruction instruction = instructions2 . get ( i ) ; Instruction instruction_cloned = instruction . makeCopy ( ) ; s . add ( InstructionHelper . reassign2Program ( instruction_cloned , gp1 ) ) ; } instructions1 . addAll ( i1 , s ) ; } if ( ( gp1 . length ( ) > manager . getMinProgramLength ( ) ) && ( ( prob_r > manager . getInsertionProbability ( ) ) || gp1 . length ( ) == manager . getMaxProgramLength ( ) ) ) { int max_segment_length = ( gp2 . length ( ) < manager . getMaxSegmentLength ( ) ) ? gp2 . length ( ) : manager . getMaxSegmentLength ( ) ; int ls1 = 1 + randEngine . nextInt ( max_segment_length ) ; if ( gp1 . length ( ) < ls1 ) { ls1 = gp1 . length ( ) - manager . getMinProgramLength ( ) ; } else if ( gp1 . length ( ) - ls1 < manager . getMinProgramLength ( ) ) { ls1 = gp1 . length ( ) - manager . getMinProgramLength ( ) ; } int i1 = randEngine . nextInt ( gp1 . length ( ) - ls1 ) ; List < Instruction > instructions1 = gp1 . getInstructions ( ) ; for ( int j = ls1 - 1 ; j >= i1 ; j -- ) { instructions1 . 
remove ( j ) ; } } gp1 . invalidateCost ( ) ;
public class MergeAction { /** * Emits a sentence fragment combining all the merge actions . */ public static void addActionsTo ( SourceBuilder code , Set < MergeAction > mergeActions , boolean forBuilder ) { } }
SetMultimap < String , String > nounsByVerb = TreeMultimap . create ( ) ; mergeActions . forEach ( mergeAction -> { if ( forBuilder || ! mergeAction . builderOnly ) { nounsByVerb . put ( mergeAction . verb , mergeAction . noun ) ; } } ) ; List < String > verbs = ImmutableList . copyOf ( nounsByVerb . keySet ( ) ) ; String lastVerb = getLast ( verbs , null ) ; for ( String verb : nounsByVerb . keySet ( ) ) { code . add ( ", %s%s" , ( verbs . size ( ) > 1 && verb . equals ( lastVerb ) ) ? "and " : "" , verb ) ; List < String > nouns = ImmutableList . copyOf ( nounsByVerb . get ( verb ) ) ; for ( int i = 0 ; i < nouns . size ( ) ; ++ i ) { String separator = ( i == 0 ) ? "" : ( i == nouns . size ( ) - 1 ) ? " and" : "," ; code . add ( "%s %s" , separator , nouns . get ( i ) ) ; } }
public class Script { /** * Exposes the script interpreter . Normally you should not use this directly , instead use * { @ link TransactionInput # verify ( TransactionOutput ) } or * { @ link Script # correctlySpends ( Transaction , int , TransactionWitness , Coin , Script , Set ) } . This method * is useful if you need more precise control or access to the final state of the stack . This interface is very * likely to change in future . * @ deprecated Use { @ link # executeScript ( Transaction , long , Script , LinkedList , Set ) } * instead . */ @ Deprecated public static void executeScript ( @ Nullable Transaction txContainingThis , long index , Script script , LinkedList < byte [ ] > stack , boolean enforceNullDummy ) throws ScriptException { } }
final EnumSet < VerifyFlag > flags = enforceNullDummy ? EnumSet . of ( VerifyFlag . NULLDUMMY ) : EnumSet . noneOf ( VerifyFlag . class ) ; executeScript ( txContainingThis , index , script , stack , flags ) ;
public class ID3v2Tag { /** * Determines the new amount of padding to use . If the user has not * changed the amount of padding then existing padding will be overwritten * instead of increasing the size of the file . That is only if there is * a sufficient amount of padding for the updated tag . * @ return the new amount of padding */ private int getUpdatedPadding ( ) { } }
int curSize = getSize ( ) ; int pad = 0 ; if ( ( origPadding == padding ) && ( curSize > origSize ) && ( padding >= ( curSize - origSize ) ) ) { pad = padding - ( curSize - origSize ) ; } else if ( curSize < origSize ) { pad = ( origSize - curSize ) + padding ; } return pad ;
public class StoreRoutingPlan { /** * Check that the key belongs to one of the partitions in the map of replica * type to partitions * @ param nodeId Node on which this is running ( generally stealer node ) * @ param key The key to check * @ param replicaToPartitionList Mapping of replica type to partition list * @ param cluster Cluster metadata * @ param storeDef The store definition * @ return Returns a boolean to indicate if this belongs to the map */ public static boolean checkKeyBelongsToPartition ( int nodeId , byte [ ] key , HashMap < Integer , List < Integer > > replicaToPartitionList , Cluster cluster , StoreDefinition storeDef ) { } }
boolean checkResult = false ; if ( storeDef . getRoutingStrategyType ( ) . equals ( RoutingStrategyType . TO_ALL_STRATEGY ) || storeDef . getRoutingStrategyType ( ) . equals ( RoutingStrategyType . TO_ALL_LOCAL_PREF_STRATEGY ) ) { checkResult = true ; } else { List < Integer > keyPartitions = new RoutingStrategyFactory ( ) . updateRoutingStrategy ( storeDef , cluster ) . getPartitionList ( key ) ; List < Integer > nodePartitions = cluster . getNodeById ( nodeId ) . getPartitionIds ( ) ; checkResult = StoreRoutingPlan . checkKeyBelongsToPartition ( keyPartitions , nodePartitions , replicaToPartitionList ) ; } return checkResult ;
public class SequenceScalePanel { /** * set some default rendering hints , like text antialiasing on * @ param g2D the graphics object to set the defaults on */ protected void setPaintDefaults ( Graphics2D g2D ) { } }
g2D . setRenderingHint ( RenderingHints . KEY_TEXT_ANTIALIASING , RenderingHints . VALUE_TEXT_ANTIALIAS_ON ) ; g2D . setRenderingHint ( RenderingHints . KEY_ANTIALIASING , RenderingHints . VALUE_ANTIALIAS_ON ) ; g2D . setFont ( seqFont ) ;
public class FuzzySymbolicVariableConstraintSolver { /** * public LinkedHashMap < FuzzySymbolicDomain [ ] , Double > getLabelings ( ) { * return this . sdTuples ; */ private boolean checkEquality ( HashMap < String , Double > a , HashMap < String , Double > b ) { } }
for ( String s : a . keySet ( ) ) { if ( Double . compare ( a . get ( s ) , b . get ( s ) ) != 0 ) return false ; } return true ;
public class QueryBuilder { /** * Set arguments for query . * @ param args Arguments . * @ return The query builder instance for chained calls . */ @ SafeVarargs public final QueryBuilder arguments ( Object ... args ) { } }
set ( Param . QUERY_ARGS , "" ) ; queryArgs = args . clone ( ) ; return this ;
public class FlinkKafkaConsumerBase { /** * Version - specific subclasses which can expose the functionality should override and allow public access . */ protected FlinkKafkaConsumerBase < T > setStartFromTimestamp ( long startupOffsetsTimestamp ) { } }
checkArgument ( startupOffsetsTimestamp >= 0 , "The provided value for the startup offsets timestamp is invalid." ) ; long currentTimestamp = System . currentTimeMillis ( ) ; checkArgument ( startupOffsetsTimestamp <= currentTimestamp , "Startup time[%s] must be before current time[%s]." , startupOffsetsTimestamp , currentTimestamp ) ; this . startupMode = StartupMode . TIMESTAMP ; this . startupOffsetsTimestamp = startupOffsetsTimestamp ; this . specificStartupOffsets = null ; return this ;
public class EndpointImpl { /** * { @ inheritDoc } */ @ Override public boolean getHideFromDiscovery ( ) { } }
String v = this . unknownAttributes . getOrDefault ( hideFromDiscoveryQname , XSBooleanValue . toString ( false , false ) ) ; return XSBooleanValue . valueOf ( v ) . getValue ( ) ;
public class JavaDialect { /** * This will add the rule for compiling later on . * It will not actually call the compiler */ public void addRule ( final RuleBuildContext context ) { } }
final RuleImpl rule = context . getRule ( ) ; final RuleDescr ruleDescr = context . getRuleDescr ( ) ; RuleClassBuilder classBuilder = context . getDialect ( ) . getRuleClassBuilder ( ) ; String ruleClass = classBuilder . buildRule ( context ) ; // return if there is no ruleclass name ; if ( ruleClass == null ) { return ; } // The compilation result is for the entire rule , so difficult to associate with any descr addClassCompileTask ( this . pkg . getName ( ) + "." + ruleDescr . getClassName ( ) , ruleDescr , ruleClass , this . src , new RuleErrorHandler ( ruleDescr , rule , "Rule Compilation error" ) ) ; JavaDialectRuntimeData data = ( JavaDialectRuntimeData ) this . pkg . getDialectRuntimeRegistry ( ) . getDialectData ( ID ) ; for ( Map . Entry < String , String > invokers : context . getInvokers ( ) . entrySet ( ) ) { final String className = invokers . getKey ( ) ; // Check if an invoker - returnvalue , predicate , eval or consequence has been associated // If so we add it to the PackageCompilationData as it will get wired up on compilation final Object invoker = context . getInvokerLookup ( className ) ; if ( invoker instanceof Wireable ) { data . putInvoker ( className , ( Wireable ) invoker ) ; } final String text = invokers . getValue ( ) ; final BaseDescr descr = context . getDescrLookup ( className ) ; addClassCompileTask ( className , descr , text , this . src , new RuleInvokerErrorHandler ( descr , rule , "Unable to generate rule invoker." ) ) ; } // setup the line mappins for this rule final String name = this . pkg . getName ( ) + "." + StringUtils . ucFirst ( ruleDescr . getClassName ( ) ) ; final LineMappings mapping = new LineMappings ( name ) ; mapping . setStartLine ( ruleDescr . getConsequenceLine ( ) ) ; mapping . setOffset ( ruleDescr . getConsequenceOffset ( ) ) ; this . pkg . getDialectRuntimeRegistry ( ) . getLineMappings ( ) . put ( name , mapping ) ;
public class InterProcessSemaphore {
    /**
     * <p>Acquire {@code qty} leases. If there are not enough leases available, this method
     * blocks until either the maximum number of leases is increased enough or other clients/processes
     * close enough leases. However, this method will only block up to the given time limit.
     * If time expires before all leases are acquired, the subset of acquired leases is
     * automatically closed.</p>
     * <p>The client must close the leases when done with them, e.g. in a
     * <code>finally</code> block. NOTE: You can use {@link #returnAll(Collection)} for this.</p>
     *
     * @param qty number of leases to acquire; must be positive
     * @param time time to wait
     * @param unit time unit
     * @return the new leases, or null if time ran out
     * @throws Exception ZK errors, interruptions, etc.
     */
    public Collection < Lease > acquire ( int qty , long time , TimeUnit unit ) throws Exception { } }
long startMs = System . currentTimeMillis ( ) ;
long waitMs = TimeUnit . MILLISECONDS . convert ( time , unit ) ;
Preconditions . checkArgument ( qty > 0 , "qty cannot be 0" ) ;
ImmutableList . Builder < Lease > builder = ImmutableList . builder ( ) ;
try {
    while ( qty -- > 0 ) {
        // Compute the wait budget remaining for this lease attempt.
        long elapsedMs = System . currentTimeMillis ( ) - startMs ;
        long thisWaitMs = waitMs - elapsedMs ;
        String path = ( thisWaitMs > 0 ) ? internals . attemptLock ( thisWaitMs , TimeUnit . MILLISECONDS , null ) : null ;
        if ( path == null ) {
            // Timed out: release the leases acquired so far and signal failure.
            returnAll ( builder . build ( ) ) ;
            return null ;
        }
        builder . add ( makeLease ( path ) ) ;
    }
} catch ( Exception e ) {
    ThreadUtils . checkInterrupted ( e ) ;
    // Don't leak partially acquired leases on error.
    returnAll ( builder . build ( ) ) ;
    throw e ;
}
return builder . build ( ) ;
public class UndirectedMultigraph { /** * { @ inheritDoc } */ public Set < TypedEdge < T > > getEdges ( int vertex1 , int vertex2 ) { } }
SparseTypedEdgeSet < T > edges = vertexToEdges . get ( vertex1 ) ; return ( edges == null ) ? Collections . < TypedEdge < T > > emptySet ( ) : edges . getEdges ( vertex2 ) ;
public class AppPackageUrl { /** * Get Resource Url for UpdatePackage * @ param applicationKey The application key uniquely identifies the developer namespace , application ID , version , and package in Dev Center . The format is { Dev Account namespace } . { Application ID } . { Application Version } . { Package name } . * @ param responseFields Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object . This parameter should only be used to retrieve data . Attempting to update data using this parameter may cause data loss . * @ return String Resource Url */ public static MozuUrl updatePackageUrl ( String applicationKey , String responseFields ) { } }
UrlFormatter formatter = new UrlFormatter ( "/api/platform/appdev/apppackages/{applicationKey}/?responseFields={responseFields}" ) ; formatter . formatUrl ( "applicationKey" , applicationKey ) ; formatter . formatUrl ( "responseFields" , responseFields ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class NBTOutputStream { /** * Writes a tag . * @ param tag The tag to write . * @ throws java . io . IOException if an I / O error occurs . */ public void writeTag ( Tag < ? > tag ) throws IOException { } }
String name = tag . getName ( ) ; byte [ ] nameBytes = name . getBytes ( NBTConstants . CHARSET . name ( ) ) ; os . writeByte ( tag . getType ( ) . getId ( ) ) ; os . writeShort ( nameBytes . length ) ; os . write ( nameBytes ) ; if ( tag . getType ( ) == TagType . TAG_END ) { throw new IOException ( "Named TAG_End not permitted." ) ; } writeTagPayload ( tag ) ;
public class ClassUtils { /** * Similar as { @ link # classForName ( String ) } , but also supports primitive types * and arrays as specified for the JavaType element in the JavaServer Faces Config DTD . * @ param type fully qualified class name or name of a primitive type , both optionally * followed by " [ ] " to indicate an array type * @ return the corresponding Class * @ throws NullPointerException if type is null * @ throws ClassNotFoundException */ public static Class javaTypeToClass ( String type ) throws ClassNotFoundException { } }
if ( type == null ) { throw new NullPointerException ( "type" ) ; } // try common types and arrays of common types first Class clazz = ( Class ) COMMON_TYPES . get ( type ) ; if ( clazz != null ) { return clazz ; } int len = type . length ( ) ; if ( len > 2 && type . charAt ( len - 1 ) == ']' && type . charAt ( len - 2 ) == '[' ) { String componentType = type . substring ( 0 , len - 2 ) ; Class componentTypeClass = classForName ( componentType ) ; return Array . newInstance ( componentTypeClass , 0 ) . getClass ( ) ; } return classForName ( type ) ;
public class ServerService { /** * Modify ALL existing public IPs on servers * @ param servers The list of server references * @ param config publicIp config * @ return OperationFuture wrapper for list of ServerRef */ public OperationFuture < List < Server > > modifyPublicIp ( List < Server > servers , ModifyPublicIpConfig config ) { } }
List < JobFuture > futures = servers . stream ( ) . map ( serverRef -> modifyPublicIp ( serverRef , config ) . jobFuture ( ) ) . collect ( toList ( ) ) ; return new OperationFuture < > ( servers , new ParallelJobsFuture ( futures ) ) ;
public class Solo { /** * Clicks a CheckBox matching the specified index . * @ param index the index of the { @ link CheckBox } to click . { @ code 0 } if only one is available */ public void clickOnCheckBox ( int index ) { } }
if ( config . commandLogging ) { Log . d ( config . commandLoggingTag , "clickOnCheckBox(" + index + ")" ) ; } clicker . clickOn ( CheckBox . class , index ) ;
public class AttributeTypeImpl { /** * Checks that existing instances match the provided regex . * @ param regex The regex to check against * @ throws TransactionException when an instance does not match the provided regex */ private void checkInstancesMatchRegex ( @ Nullable String regex ) { } }
if ( regex != null ) { Pattern pattern = Pattern . compile ( regex ) ; instances ( ) . forEach ( resource -> { String value = ( String ) resource . value ( ) ; Matcher matcher = pattern . matcher ( value ) ; if ( ! matcher . matches ( ) ) { throw TransactionException . regexFailure ( this , value , regex ) ; } } ) ; }
public class AbstractMapServiceFactory { /** * Returns a { @ link MapService } object by populating it with required * auxiliary services . * @ return { @ link MapService } object */ @ Override public MapService createMapService ( ) { } }
NodeEngine nodeEngine = getNodeEngine ( ) ; MapServiceContext mapServiceContext = getMapServiceContext ( ) ; ManagedService managedService = createManagedService ( ) ; CountingMigrationAwareService migrationAwareService = createMigrationAwareService ( ) ; TransactionalService transactionalService = createTransactionalService ( ) ; RemoteService remoteService = createRemoteService ( ) ; EventPublishingService eventPublishingService = createEventPublishingService ( ) ; PostJoinAwareService postJoinAwareService = createPostJoinAwareService ( ) ; SplitBrainHandlerService splitBrainHandlerService = createSplitBrainHandlerService ( ) ; ReplicationSupportingService replicationSupportingService = createReplicationSupportingService ( ) ; StatisticsAwareService statisticsAwareService = createStatisticsAwareService ( ) ; PartitionAwareService partitionAwareService = createPartitionAwareService ( ) ; MapQuorumAwareService quorumAwareService = createQuorumAwareService ( ) ; ClientAwareService clientAwareService = createClientAwareService ( ) ; checkNotNull ( nodeEngine , "nodeEngine should not be null" ) ; checkNotNull ( mapServiceContext , "mapServiceContext should not be null" ) ; checkNotNull ( managedService , "managedService should not be null" ) ; checkNotNull ( migrationAwareService , "migrationAwareService should not be null" ) ; checkNotNull ( transactionalService , "transactionalService should not be null" ) ; checkNotNull ( remoteService , "remoteService should not be null" ) ; checkNotNull ( eventPublishingService , "eventPublishingService should not be null" ) ; checkNotNull ( postJoinAwareService , "postJoinAwareService should not be null" ) ; checkNotNull ( splitBrainHandlerService , "splitBrainHandlerService should not be null" ) ; checkNotNull ( replicationSupportingService , "replicationSupportingService should not be null" ) ; checkNotNull ( statisticsAwareService , "statisticsAwareService should not be null" ) ; checkNotNull ( partitionAwareService , 
"partitionAwareService should not be null" ) ; checkNotNull ( quorumAwareService , "quorumAwareService should not be null" ) ; checkNotNull ( clientAwareService , "clientAwareService should not be null" ) ; MapService mapService = new MapService ( ) ; mapService . managedService = managedService ; mapService . migrationAwareService = migrationAwareService ; mapService . transactionalService = transactionalService ; mapService . remoteService = remoteService ; mapService . eventPublishingService = eventPublishingService ; mapService . postJoinAwareService = postJoinAwareService ; mapService . splitBrainHandlerService = splitBrainHandlerService ; mapService . replicationSupportingService = replicationSupportingService ; mapService . statisticsAwareService = statisticsAwareService ; mapService . mapServiceContext = mapServiceContext ; mapService . partitionAwareService = partitionAwareService ; mapService . quorumAwareService = quorumAwareService ; mapService . clientAwareService = clientAwareService ; mapServiceContext . setService ( mapService ) ; return mapService ;
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertCMRFidelityStpCMRExToString ( EDataType eDataType , Object instanceValue ) { } }
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class PolicyEventsInner { /** * Queries policy events for the subscription level policy set definition . * @ param subscriptionId Microsoft Azure subscription ID . * @ param policySetDefinitionName Policy set definition name . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws QueryFailureException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the PolicyEventsQueryResultsInner object if successful . */ public PolicyEventsQueryResultsInner listQueryResultsForPolicySetDefinition ( String subscriptionId , String policySetDefinitionName ) { } }
return listQueryResultsForPolicySetDefinitionWithServiceResponseAsync ( subscriptionId , policySetDefinitionName ) . toBlocking ( ) . single ( ) . body ( ) ;
public class RandomUtil { /** * 随机获得列表中的元素 * @ param < T > 元素类型 * @ param list 列表 * @ param limit 限制列表的前N项 * @ return 随机元素 */ public static < T > T randomEle ( List < T > list , int limit ) { } }
return list . get ( randomInt ( limit ) ) ;
public class VariantMetadataManager { /** * Add a variant file metadata to a given variant study metadata ( from study ID ) . * @ param fileMetadata Variant file metadata to add * @ param studyId Study ID */ public void addFile ( VariantFileMetadata fileMetadata , String studyId ) { } }
// Sanity check if ( fileMetadata == null || StringUtils . isEmpty ( fileMetadata . getId ( ) ) ) { logger . error ( "Variant file metadata (or its ID) is null or empty." ) ; return ; } VariantStudyMetadata variantStudyMetadata = getVariantStudyMetadata ( studyId ) ; if ( variantStudyMetadata == null ) { logger . error ( "Study not found. Check your study ID: '{}'" , studyId ) ; return ; } if ( variantStudyMetadata . getFiles ( ) == null ) { variantStudyMetadata . setFiles ( new ArrayList < > ( ) ) ; } for ( VariantFileMetadata file : variantStudyMetadata . getFiles ( ) ) { if ( file . getId ( ) != null && file . getId ( ) . equals ( fileMetadata . getId ( ) ) ) { logger . error ( "Variant file metadata with id '{}' already exists in study '{}'" , fileMetadata . getId ( ) , studyId ) ; return ; } } // individual management if ( variantStudyMetadata . getIndividuals ( ) == null ) { variantStudyMetadata . setIndividuals ( new ArrayList < > ( ) ) ; } if ( ! variantStudyMetadata . getIndividuals ( ) . isEmpty ( ) ) { // check if samples are already in study for ( String sampleId : fileMetadata . getSampleIds ( ) ) { for ( org . opencb . biodata . models . metadata . Individual individual : variantStudyMetadata . getIndividuals ( ) ) { for ( Sample sample : individual . getSamples ( ) ) { if ( sampleId . equals ( sample . getId ( ) ) ) { logger . error ( "Sample '{}' from file {} already exists in study '{}'" , sampleId , fileMetadata . getId ( ) , studyId ) ; return ; } } } } } // by default , create individuals from sample , and individual ID takes the sample ID // TODO : manage multiple samples per individual for ( String sampleId : fileMetadata . getSampleIds ( ) ) { List < Sample > samples = new ArrayList < > ( ) ; Sample sample = new Sample ( ) ; sample . setId ( sampleId ) ; sample . setAnnotations ( new HashMap < > ( ) ) ; samples . add ( sample ) ; org . opencb . biodata . models . metadata . Individual individual = new org . opencb . biodata . models . 
metadata . Individual ( ) ; individual . setId ( sampleId ) ; individual . setSamples ( samples ) ; variantStudyMetadata . getIndividuals ( ) . add ( individual ) ; } variantStudyMetadata . getFiles ( ) . add ( fileMetadata ) ;
public class UseSplit { /** * implements the visitor to make sure the class is at least java 1.4 and to reset the opcode stack */ @ Override public void visitClassContext ( ClassContext classContext ) { } }
try { JavaClass cls = classContext . getJavaClass ( ) ; if ( cls . getMajor ( ) >= Const . MAJOR_1_4 ) { stack = new OpcodeStack ( ) ; regValueType = new HashMap < Integer , State > ( ) ; super . visitClassContext ( classContext ) ; } } finally { stack = null ; regValueType = null ; }
public class DefaultNodeCreator { /** * { @ inheritDoc } */ @ Override public Node createNode ( GraphDatabaseService database ) { } }
Node node = database . createNode ( label ) ; node . setProperty ( UUID , java . util . UUID . randomUUID ( ) . toString ( ) ) ; return node ;
public class Traversal { /** * A helper method to retrieve a single result from the query or throw an exception if the query yields no results . * @ param query the query to run * @ param entityType the expected type of the entity ( used only for error reporting ) * @ return the single result * @ throws EntityNotFoundException if the query doesn ' t return any results */ protected BE getSingle ( Query query , SegmentType entityType ) { } }
return inTx ( tx -> Util . getSingle ( tx , query , entityType ) ) ;
public class FluentFunctions { /** * Convert a checked statement ( e . g . a method or Consumer with no return value that throws a Checked Exception ) to a * fluent expression ( FluentFunction ) . The input is returned as emitted * < pre > * { @ code * public void print ( String input ) throws IOException { * System . out . println ( input ) ; * FluentFunctions . checkedExpression ( this : : print ) * . applyHKT ( " hello " ) * < / pre > * @ param action * @ return FluentFunction */ public static < T > FluentFunctions . FluentFunction < T , T > checkedExpression ( final CheckedConsumer < T > action ) { } }
final Consumer < T > toUse = ExceptionSoftener . softenConsumer ( action ) ; return FluentFunctions . of ( t -> { toUse . accept ( t ) ; return t ; } ) ;
public class ScorecardModel { /** * Use the rule interpreter */ public double score_interpreter ( final HashMap < String , Comparable > row ) { } }
double score = _initialScore ; for ( int i = 0 ; i < _rules . length ; i ++ ) score += _rules [ i ] . score ( row . get ( _colNames [ i ] ) ) ; return score ;
public class VoiceApi {
    /**
     * Initiates a two-step conference to the specified destination. This
     * places the existing call on hold and creates a new call in the dialing
     * state (step 1). After initiating the conference, use
     * {@code completeConference()} to bring all parties into the same call
     * (step 2).
     *
     * @param connId      connection ID of the call to start the conference
     *                    from; this call will be placed on hold
     * @param destination the number to be dialed
     */
    public void initiateConference ( String connId , String destination ) throws WorkspaceApiException { } }
// Convenience overload: delegates with all optional parameters left null.
this . initiateConference ( connId , destination , null , null , null , null , null ) ;
public class Signatures { /** * Collects methods of { @ code clazz } with the given { @ code name } . * Methods are included if their modifier bits match each bit of { @ code include } * and no bit of { @ code exclude } . * @ param clazz * @ param name * @ param include * @ param exclude * @ return methods */ public static Method [ ] collectMethods ( Class < ? > clazz , String name , int include , int exclude ) { } }
final List < Method > result = new ArrayList < > ( ) ; collectMethods ( result , new ArrayList < Class < ? > [ ] > ( ) , new HashSet < Class < ? > > ( ) , clazz , name , include , exclude ) ; return result . toArray ( new Method [ result . size ( ) ] ) ;
public class LFltToDblFunctionBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */ @ Nonnull public static LFltToDblFunction fltToDblFunctionFrom ( Consumer < LFltToDblFunctionBuilder > buildingFunction ) { } }
LFltToDblFunctionBuilder builder = new LFltToDblFunctionBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ;
public class DrawMessage { /** * Inits the graphics2D object . * @ param g * the Graphics object . * @ return the graphics2 d */ private Graphics2D initGraphics2D ( final Graphics g ) { } }
Graphics2D g2 ; g2 = ( Graphics2D ) g ; g2 . setRenderingHint ( RenderingHints . KEY_ANTIALIASING , RenderingHints . VALUE_ANTIALIAS_ON ) ; g2 . setRenderingHint ( RenderingHints . KEY_RENDERING , RenderingHints . VALUE_RENDER_QUALITY ) ; g2 . setColor ( this . color ) ; return g2 ;
public class CommerceUserSegmentEntryLocalServiceBaseImpl {
    /**
     * Updates the commerce user segment entry in the database, or adds it if
     * it does not yet exist. Also notifies the appropriate model listeners.
     *
     * @param commerceUserSegmentEntry the commerce user segment entry
     * @return the commerce user segment entry that was updated
     */
    @ Indexable ( type = IndexableType . REINDEX ) @ Override public CommerceUserSegmentEntry updateCommerceUserSegmentEntry ( CommerceUserSegmentEntry commerceUserSegmentEntry ) { } }
// The persistence layer handles insert-or-update and listener notification.
return commerceUserSegmentEntryPersistence . update ( commerceUserSegmentEntry ) ;
public class Weekcycle {
    /**
     * Computes the temporal distance between the given dates, measured in
     * this unit.
     *
     * @param start starting date
     * @param end   ending date
     * @return duration as count of this unit
     */
    public long between ( PlainDate start , PlainDate end ) { } }
// Derive the unit rule for the starting date, then measure the span.
return this . derive ( start ) . between ( start , end ) ;
public class DatabaseDAODefaultImpl { public void put_device_attribute_property ( Database database , String deviceName , DbAttribute attr ) throws DevFailed { } }
DbAttribute [ ] da = new DbAttribute [ 1 ] ; da [ 0 ] = attr ; put_device_attribute_property ( database , deviceName , da ) ;
public class ImplCommonOps_DSCC {
    /**
     * Performs sparse matrix addition: C = alpha*A + beta*B.
     *
     * @param alpha scalar value multiplied against A
     * @param A Matrix
     * @param beta scalar value multiplied against B
     * @param B Matrix
     * @param C Output matrix. Its nonzero pattern is rebuilt; the result's
     *          indices are not guaranteed to be sorted.
     * @param gw (Optional) Storage for internal workspace. Can be null.
     * @param gx (Optional) Storage for internal workspace. Can be null.
     */
    public static void add ( double alpha , DMatrixSparseCSC A , double beta , DMatrixSparseCSC B , DMatrixSparseCSC C , @ Nullable IGrowArray gw , @ Nullable DGrowArray gx ) { } }
// Dense scratch column 'x' and marker array 'w', both sized to the row count.
double [ ] x = adjust ( gx , A . numRows ) ;
int [ ] w = adjust ( gw , A . numRows , A . numRows ) ;
C . indicesSorted = false ;
C . nz_length = 0 ;
for ( int col = 0 ; col < A . numCols ; col ++ ) {
    C . col_idx [ col ] = C . nz_length ;
    // Accumulate alpha*A(:,col) then beta*B(:,col) into the dense vector x,
    // while recording column col's nonzero pattern in C.
    multAddColA ( A , col , alpha , C , col + 1 , x , w ) ;
    multAddColA ( B , col , beta , C , col + 1 , x , w ) ;
    // take the values in the dense vector ' x ' and put them into ' C '
    int idxC0 = C . col_idx [ col ] ;
    int idxC1 = C . col_idx [ col + 1 ] ;
    for ( int i = idxC0 ; i < idxC1 ; i ++ ) {
        C . nz_values [ i ] = x [ C . nz_rows [ i ] ] ;
    }
}
public class ApiOvhRouter { /** * Alter this object properties * REST : PUT / router / { serviceName } / vpn / { id } * @ param body [ required ] New object properties * @ param serviceName [ required ] The internal name of your Router offer * @ param id [ required ] */ public void serviceName_vpn_id_PUT ( String serviceName , Long id , OvhVpn body ) throws IOException { } }
String qPath = "/router/{serviceName}/vpn/{id}" ; StringBuilder sb = path ( qPath , serviceName , id ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ;
public class StreamConduit { /** * Creates a stream pumper to copy the given input stream to the * given output stream . * @ param is the input stream to copy from . * @ param os the output stream to copy to . * @ param closeWhenExhausted if true close the inputstream . * @ return a thread object that does the pumping , subclasses * should return an instance of { @ code PumpStreamHandler . ThreadWithPumper * ThreadWithPumper } . */ private Thread createPump ( InputStream is , OutputStream os , boolean closeWhenExhausted ) { } }
BlockingStreamPumper pumper = new BlockingStreamPumper ( is , os , closeWhenExhausted ) ; // pumper . setAutoflush ( true ) ; / / always auto - flush final Thread result = new ThreadWithPumper ( pumper ) ; result . setDaemon ( true ) ; return result ;
public class JaxRsClientFactory {
    /**
     * Registers the given features for all clients created by this factory.
     *
     * @param features feature classes to register on every created client
     * @return this factory, for chaining
     */
    @ SafeVarargs public final synchronized JaxRsClientFactory addFeatureToAllClients ( Class < ? extends Feature > ... features ) { } }
// The wildcard group applies to every client this factory creates.
return addFeatureToGroup ( PrivateFeatureGroup . WILDCARD , features ) ;
public class FindBugsWorker {
    /**
     * Blocks the current thread until the FindBugs analysis completes.
     *
     * @param findBugs engine instance; it is <b>disposed</b> after the
     *                 analysis is done, even when the analysis fails
     */
    private void runFindBugs ( final FindBugs2 findBugs ) { } }
if ( DEBUG ) { FindbugsPlugin . log ( "Running findbugs in thread " + Thread . currentThread ( ) . getName ( ) ) ; }
System . setProperty ( "findbugs.progress" , "true" ) ;
try {
    // Perform the analysis ! ( note : This is not thread - safe )
    findBugs . execute ( ) ;
} catch ( InterruptedException e ) {
    if ( DEBUG ) { FindbugsPlugin . getDefault ( ) . logException ( e , "Worker interrupted" ) ; }
    // Restore the interrupt flag for callers further up the stack.
    Thread . currentThread ( ) . interrupt ( ) ;
} catch ( IOException e ) {
    FindbugsPlugin . getDefault ( ) . logException ( e , "Error performing SpotBugs analysis" ) ;
} finally {
    // Always free engine resources.
    findBugs . dispose ( ) ;
}
public class IncrementalSemanticAnalysis { /** * Adds the index vector to the semantic vector using the percentage to * specify how much of each dimesion is added . * @ param semantics the semantic vector whose values will be modified by the * index vector * @ param index the index vector that will be added to the semantic vector * @ param the percentage of the index vector ' s values that will be added to * the semantic vector */ private static void add ( DoubleVector semantics , TernaryVector index , double percentage ) { } }
for ( int p : index . positiveDimensions ( ) ) semantics . add ( p , percentage ) ; for ( int n : index . negativeDimensions ( ) ) semantics . add ( n , - percentage ) ;
public class AmazonCloudDirectoryClient {
    /**
     * Retrieves metadata about an object.
     *
     * @param request the GetObjectInformation request
     * @return result of the GetObjectInformation operation returned by the service
     * @throws InternalServiceException transient AWS-side problem; retry, or check the
     *         <a href="http://status.aws.amazon.com/">AWS Service Health Dashboard</a>
     * @throws InvalidArnException the provided ARN value is not valid
     * @throws RetryableConflictException conflict with a previous successful write was detected
     *         (e.g. a SERIALIZABLE read before the write propagated); retry with backoff
     * @throws ValidationException the request is malformed in some manner; see the exception message
     * @throws LimitExceededException service limits are exceeded; see
     *         <a href="https://docs.aws.amazon.com/clouddirectory/latest/developerguide/limits.html">Limits</a>
     * @throws AccessDeniedException access denied; check your permissions
     * @throws DirectoryNotEnabledException operations are only permitted on enabled directories
     * @throws ResourceNotFoundException the specified resource could not be found
     * @sample AmazonCloudDirectory.GetObjectInformation
     */
    @ Override public GetObjectInformationResult getObjectInformation ( GetObjectInformationRequest request ) { } }
// Apply request handlers/mutations before dispatching to the service.
request = beforeClientExecution ( request ) ;
return executeGetObjectInformation ( request ) ;
public class LdapSearcher {
    /**
     * Searches LDAP and stores the results in the searchResults field,
     * discarding results from any previous search.
     *
     * @param context    name of the context where the search starts (depth depends on ldap.search.scope)
     * @param filterExpr filter expression; may contain "{i}" placeholders where i is a
     *                   non-negative integer. May not be null.
     * @param filterArgs arguments substituted for the "{i}" placeholders; null acts as an empty array
     * @return this
     */
    public LdapSearcher search ( String context , String filterExpr , Object ... filterArgs ) { } }
searchResults . clear ( ) ;
LdapContext ldapContext = null ;
NamingEnumeration < SearchResult > ldapResult = null ;
try {
    ldapContext = buildLdapContext ( ) ;
    ldapResult = ldapContext . search ( context , filterExpr , filterArgs , createSearchControls ( ) ) ;
    while ( ldapResult . hasMore ( ) ) { searchResults . add ( ldapResult . next ( ) ) ; }
} catch ( NamingException ex ) {
    throw new RuntimeException ( "LDAP search has failed" , ex ) ;
} finally {
    // Close the enumeration and context manually — neither type is
    // AutoCloseable — and log (not rethrow) close failures so they cannot
    // mask the primary outcome.
    if ( ldapResult != null ) { try { ldapResult . close ( ) ; } catch ( NamingException ex ) { log . error ( "Failed to close LDAP results enumeration" , ex ) ; } }
    if ( ldapContext != null ) { try { ldapContext . close ( ) ; } catch ( NamingException ex ) { log . error ( "Failed to close LDAP context" , ex ) ; } }
}
return this ;
public class IOUtil {
    /**
     * Closes the given Transport without propagating close failures.
     *
     * @param t transport to close; may be null or already disconnected, in
     *          which case nothing happens
     */
    public static void closeEL ( Transport t ) { } }
try {
    if ( t != null && t . isConnected ( ) ) t . close ( ) ;
} catch ( Throwable e ) {
    // Swallow close failures, but let ExceptionUtil rethrow the ones it
    // classifies as must-propagate.
    ExceptionUtil . rethrowIfNecessary ( e ) ;
}
public class SQLUtils {
    /**
     * Gets the max value of a column.
     *
     * @param connection connection
     * @param table      table name
     * @param column     column name
     * @param where      where clause (without the "where" keyword), or null
     * @param args       where arguments
     * @return max value, or null when no rows match
     * @since 1.1.1
     */
    public static Integer max ( Connection connection , String table , String column , String where , String [ ] args ) { } }
Integer max = null ;
// Only query for the max when at least one row matches; otherwise return null.
if ( count ( connection , table , where , args ) > 0 ) {
    StringBuilder maxQuery = new StringBuilder ( ) ;
    // Identifiers are quote-wrapped; the caller-supplied where clause is
    // appended verbatim (values should be bound via the args placeholders).
    maxQuery . append ( "select max(" ) . append ( CoreSQLUtils . quoteWrap ( column ) ) . append ( ") from " ) . append ( CoreSQLUtils . quoteWrap ( table ) ) ;
    if ( where != null ) { maxQuery . append ( " where " ) . append ( where ) ; }
    String sql = maxQuery . toString ( ) ;
    max = querySingleInteger ( connection , sql , args , false ) ;
}
return max ;
public class FloatWritable { /** * Compares two FloatWritables . */ public int compareTo ( Object o ) { } }
float thisValue = this . value ; float thatValue = ( ( FloatWritable ) o ) . value ; return ( thisValue < thatValue ? - 1 : ( thisValue == thatValue ? 0 : 1 ) ) ;
public class ConcurrentAwaitableCounter {
    /**
     * Waits for roughly the "next {@code nextIncrements}" increments —
     * loosely defined because the starting count is read without
     * synchronization, so the starting point is not pinned down by the Java
     * Memory Model.
     *
     * @param nextIncrements number of further increments to wait for; must be
     *                       positive and at most MAX_COUNT / 4
     * @throws InterruptedException if interrupted while waiting
     */
    public void awaitNextIncrements ( long nextIncrements ) throws InterruptedException { } }
if ( nextIncrements <= 0 ) { throw new IllegalArgumentException ( "nextIncrements is not positive: " + nextIncrements ) ; }
// Cap the request so the wrap-around arithmetic below stays unambiguous.
if ( nextIncrements > MAX_COUNT / 4 ) { throw new UnsupportedOperationException ( "Couldn't wait for so many increments: " + nextIncrements ) ; }
// Masking with MAX_COUNT handles the counter wrapping past its maximum.
awaitCount ( ( sync . getCount ( ) + nextIncrements ) & MAX_COUNT ) ;
public class Config { /** * Returns absolute path to . ekstazi director ( URI . toString ) . Note * that the directory is not created with this invocation . * @ param parentDir * Parent directory for . ekstazi directory * @ return An absolute path ( URI . toString ) that describes . ekstazi directory */ public static String getRootDirURI ( File parentDir ) { } }
String pathAsString = parentDir . getAbsolutePath ( ) + System . getProperty ( "file.separator" ) + Names . EKSTAZI_ROOT_DIR_NAME ; return new File ( pathAsString ) . toURI ( ) . toString ( ) ;
public class TorrentHandle { /** * Returns an array ( list ) with information about pieces that are partially * downloaded or not downloaded at all but partially requested . See * { @ link PartialPieceInfo } for the fields in the returned vector . * @ return a list with partial piece info */ public ArrayList < PartialPieceInfo > getDownloadQueue ( ) { } }
partial_piece_info_vector v = new partial_piece_info_vector ( ) ; th . get_download_queue ( v ) ; int size = ( int ) v . size ( ) ; ArrayList < PartialPieceInfo > l = new ArrayList < > ( size ) ; for ( int i = 0 ; i < size ; i ++ ) { l . add ( new PartialPieceInfo ( v . get ( i ) ) ) ; } return l ;
public class CircularQueueCaptureQueriesListener { /** * Log all captured SELECT queries */ public void logSelectQueriesForCurrentThread ( ) { } }
List < String > queries = getSelectQueriesForCurrentThread ( ) . stream ( ) . map ( CircularQueueCaptureQueriesListener :: formatQueryAsSql ) . collect ( Collectors . toList ( ) ) ; ourLog . info ( "Select Queries:\n{}" , String . join ( "\n" , queries ) ) ;
public class ConstructorConstructor { /** * Constructors for common interface types like Map and List and their * subtypes . */ @ SuppressWarnings ( "unchecked" ) // use runtime checks to guarantee that ' T ' is what it is private < T > ObjectConstructor < T > newDefaultImplementationConstructor ( final Type type , Class < ? super T > rawType ) { } }
if ( Collection . class . isAssignableFrom ( rawType ) ) { if ( SortedSet . class . isAssignableFrom ( rawType ) ) { return new ObjectConstructor < T > ( ) { @ Override public T construct ( ) { return ( T ) new TreeSet < Object > ( ) ; } } ; } else if ( EnumSet . class . isAssignableFrom ( rawType ) ) { return new ObjectConstructor < T > ( ) { @ SuppressWarnings ( "rawtypes" ) @ Override public T construct ( ) { if ( type instanceof ParameterizedType ) { Type elementType = ( ( ParameterizedType ) type ) . getActualTypeArguments ( ) [ 0 ] ; if ( elementType instanceof Class ) { return ( T ) EnumSet . noneOf ( ( Class ) elementType ) ; } else { throw new JsonIOException ( "Invalid EnumSet type: " + type . toString ( ) ) ; } } else { throw new JsonIOException ( "Invalid EnumSet type: " + type . toString ( ) ) ; } } } ; } else if ( Set . class . isAssignableFrom ( rawType ) ) { return new ObjectConstructor < T > ( ) { @ Override public T construct ( ) { return ( T ) new LinkedHashSet < Object > ( ) ; } } ; } else if ( Queue . class . isAssignableFrom ( rawType ) ) { return new ObjectConstructor < T > ( ) { @ Override public T construct ( ) { return ( T ) new ArrayDeque < Object > ( ) ; } } ; } else { return new ObjectConstructor < T > ( ) { @ Override public T construct ( ) { return ( T ) new ArrayList < Object > ( ) ; } } ; } } if ( Map . class . isAssignableFrom ( rawType ) ) { if ( ConcurrentNavigableMap . class . isAssignableFrom ( rawType ) ) { return new ObjectConstructor < T > ( ) { @ Override public T construct ( ) { return ( T ) new ConcurrentSkipListMap < Object , Object > ( ) ; } } ; } else if ( ConcurrentMap . class . isAssignableFrom ( rawType ) ) { return new ObjectConstructor < T > ( ) { @ Override public T construct ( ) { return ( T ) new ConcurrentHashMap < Object , Object > ( ) ; } } ; } else if ( SortedMap . class . 
isAssignableFrom ( rawType ) ) { return new ObjectConstructor < T > ( ) { @ Override public T construct ( ) { return ( T ) new TreeMap < Object , Object > ( ) ; } } ; } else if ( type instanceof ParameterizedType && ! ( String . class . isAssignableFrom ( TypeToken . get ( ( ( ParameterizedType ) type ) . getActualTypeArguments ( ) [ 0 ] ) . getRawType ( ) ) ) ) { return new ObjectConstructor < T > ( ) { @ Override public T construct ( ) { return ( T ) new LinkedHashMap < Object , Object > ( ) ; } } ; } else { return new ObjectConstructor < T > ( ) { @ Override public T construct ( ) { return ( T ) new LinkedTreeMap < String , Object > ( ) ; } } ; } } return null ;
public class ClientHandshaker {

    /**
     * Server's own key was either a signing-only key, or was too
     * large for export rules... this message holds an ephemeral
     * RSA key to use for key exchange.
     *
     * @param mesg the ServerKeyExchange handshake message carrying the ephemeral RSA key
     * @throws IOException on handshake/alert failure
     * @throws GeneralSecurityException on signature verification errors
     */
    private void serverKeyExchange(RSA_ServerKeyExchange mesg) throws IOException, GeneralSecurityException {
        // Dump the message when handshake debugging is enabled.
        if (debug != null && Debug.isOn("handshake")) {
            mesg.print(System.out);
        }
        // The ephemeral key is signed by the server's long-term key over both
        // hello randoms; abort the handshake if the signature does not verify.
        if (!mesg.verify(serverKey, clnt_random, svr_random)) {
            fatalSE(Alerts.alert_handshake_failure, "server key exchange invalid");
            // NOTREACHED
        }
        // Remember the ephemeral RSA key for the subsequent client key exchange.
        ephemeralServerKey = mesg.getPublicKey();
    }
}
public class LinearSolverQr_DDRM { /** * Performs QR decomposition on A * @ param A not modified . */ @ Override public boolean setA ( DMatrixRMaj A ) { } }
if ( A . numRows > maxRows || A . numCols > maxCols ) { setMaxSize ( A . numRows , A . numCols ) ; } _setA ( A ) ; if ( ! decomposer . decompose ( A ) ) return false ; Q . reshape ( numRows , numRows , false ) ; R . reshape ( numRows , numCols , false ) ; decomposer . getQ ( Q , false ) ; decomposer . getR ( R , false ) ; return true ;
public class AstUtil { /** * Return true only if the specified object responds to the named method * @ param object - the object to check * @ param methodName - the name of the method * @ return true if the object responds to the named method */ public static boolean respondsTo ( Object object , String methodName ) { } }
MetaClass metaClass = DefaultGroovyMethods . getMetaClass ( object ) ; if ( ! metaClass . respondsTo ( object , methodName ) . isEmpty ( ) ) { return true ; } Map properties = DefaultGroovyMethods . getProperties ( object ) ; return properties . containsKey ( methodName ) ;
public class ReferenceEventAnalysisEngine { /** * Count the number of { @ link Event } s over a time { @ link Interval } specified in milliseconds . * @ param existingEvents set of { @ link Event } s matching triggering { @ link Event } id / user pulled from { @ link Event } storage * @ param triggeringEvent the { @ link Event } that triggered analysis * @ param configuredDetectionPoint the { @ link DetectionPoint } we are currently considering * @ return number of { @ link Event } s matching time { @ link Interval } and configured { @ link DetectionPoint } */ protected int countEvents ( Collection < Event > existingEvents , Event triggeringEvent , DetectionPoint configuredDetectionPoint ) { } }
int count = 0 ; long intervalInMillis = configuredDetectionPoint . getThreshold ( ) . getInterval ( ) . toMillis ( ) ; // grab the startTime to begin counting from based on the current time - interval DateTime startTime = DateUtils . getCurrentTimestamp ( ) . minusMillis ( ( int ) intervalInMillis ) ; // count events after most recent attack . DateTime mostRecentAttackTime = findMostRecentAttackTime ( triggeringEvent , configuredDetectionPoint ) ; for ( Event event : existingEvents ) { DateTime eventTimestamp = DateUtils . fromString ( event . getTimestamp ( ) ) ; // ensure only events that have occurred since the last attack are considered if ( eventTimestamp . isAfter ( mostRecentAttackTime ) ) { if ( intervalInMillis > 0 ) { if ( DateUtils . fromString ( event . getTimestamp ( ) ) . isAfter ( startTime ) ) { // only increment when event occurs within specified interval count ++ ; } } else { // no interval - all events considered count ++ ; } } } return count ;
public class Proxy {

    /**
     * Specifies a username for the SOCKS proxy. Supported by SOCKS v5 and above.
     *
     * @param username username for the SOCKS proxy
     * @return reference to self
     */
    public Proxy setSocksUsername(String username) {
        // Credentials only make sense for a manually configured proxy;
        // presumably this throws when the current type conflicts — so it
        // runs before any state is mutated. TODO confirm against
        // verifyProxyTypeCompatibility's implementation.
        verifyProxyTypeCompatibility(ProxyType.MANUAL);
        this.proxyType = ProxyType.MANUAL;
        this.socksUsername = username;
        return this;
    }
}
public class FormatUtil {

    /**
     * Formats the double array d with the default number format.
     *
     * @param buf String builder to append to
     * @param d the double array to be formatted
     * @param sep separator between the single values of the array, e.g. ','
     * @return Output buffer buf
     */
    public static StringBuilder formatTo(StringBuilder buf, double[] d, String sep) {
        if (d == null) {
            return buf.append("null");
        }
        // Emit the separator before every element except the first; an empty
        // array therefore appends nothing at all.
        for (int i = 0; i < d.length; i++) {
            if (i > 0) {
                buf.append(sep);
            }
            buf.append(d[i]);
        }
        return buf;
    }
}
public class CasConfigurationJasyptCipherExecutor { /** * Decrypt value string . ( but don ' t log error , for use in shell ) . * @ param value the value * @ return the string */ public String decryptValuePropagateExceptions ( final String value ) { } }
if ( StringUtils . isNotBlank ( value ) && value . startsWith ( ENCRYPTED_VALUE_PREFIX ) ) { initializeJasyptInstanceIfNecessary ( ) ; val encValue = value . substring ( ENCRYPTED_VALUE_PREFIX . length ( ) ) ; LOGGER . trace ( "Decrypting value [{}]..." , encValue ) ; val result = this . jasyptInstance . decrypt ( encValue ) ; if ( StringUtils . isNotBlank ( result ) ) { LOGGER . debug ( "Decrypted value [{}] successfully." , encValue ) ; return result ; } LOGGER . warn ( "Encrypted value [{}] has no values." , encValue ) ; } return value ;
public class MarketplaceAgreementsInner {

    /**
     * Get marketplace agreement.
     *
     * @param publisherId Publisher identifier string of image being deployed.
     * @param offerId Offer identifier string of image being deployed.
     * @param planId Plan identifier string of image being deployed.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<AgreementTermsInner> getAgreementAsync(String publisherId, String offerId, String planId, final ServiceCallback<AgreementTermsInner> serviceCallback) {
        // Delegate to the ServiceResponse-producing observable variant and
        // adapt it into a ServiceFuture, wiring the user's callback in.
        return ServiceFuture.fromResponse(getAgreementWithServiceResponseAsync(publisherId, offerId, planId), serviceCallback);
    }
}
public class SoapClient { /** * 设置编码 * @ param charset 编码 * @ return this */ public SoapClient setCharset ( Charset charset ) { } }
this . charset = charset ; try { this . message . setProperty ( SOAPMessage . CHARACTER_SET_ENCODING , this . charset . toString ( ) ) ; this . message . setProperty ( SOAPMessage . WRITE_XML_DECLARATION , "true" ) ; } catch ( SOAPException e ) { // ignore } return this ;
public class VirtualHubsInner {

    /**
     * Lists all the VirtualHubs in a subscription.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;VirtualHubInner&gt; object
     */
    public Observable<Page<VirtualHubInner>> listAsync() {
        // Delegate to the ServiceResponse-aware variant and unwrap each
        // emission to its page body for callers that don't need headers.
        return listWithServiceResponseAsync().map(new Func1<ServiceResponse<Page<VirtualHubInner>>, Page<VirtualHubInner>>() {
            @Override
            public Page<VirtualHubInner> call(ServiceResponse<Page<VirtualHubInner>> response) {
                return response.body();
            }
        });
    }
}
public class CmsXmlContent { /** * Ensures the parent values to the given path are created . < p > * @ param cms the cms context * @ param valuePath the value path * @ param locale the content locale */ private void ensureParentValues ( CmsObject cms , String valuePath , Locale locale ) { } }
if ( valuePath . contains ( "/" ) ) { String parentPath = valuePath . substring ( 0 , valuePath . lastIndexOf ( "/" ) ) ; if ( ! hasValue ( parentPath , locale ) ) { ensureParentValues ( cms , parentPath , locale ) ; int index = CmsXmlUtils . getXpathIndexInt ( parentPath ) - 1 ; addValue ( cms , parentPath , locale , index ) ; } }
public class HeatChart {

    /**
     * Draws the y-axis label string if it is not null.
     *
     * @param chartGraphics graphics context the label is rendered into
     */
    private void drawYLabel(Graphics2D chartGraphics) {
        if (yAxisLabel != null) {
            // Strings are drawn from the baseline position of the leftmost char.
            // Vertical position: centred along the heat map; horizontal: inside
            // the left margin, offset by the label font's ascent.
            int yPosYAxisLabel = heatMapC.y + (yAxisLabelSize.width / 2);
            int xPosYAxisLabel = (margin / 2) + yAxisLabelAscent;

            chartGraphics.setFont(axisLabelsFont);
            chartGraphics.setColor(axisLabelColour);

            // Create 270 degree rotated transform so the label reads bottom-to-top;
            // keep a clone of the untouched transform to restore afterwards.
            AffineTransform transform = chartGraphics.getTransform();
            AffineTransform originalTransform = (AffineTransform) transform.clone();
            transform.rotate(Math.toRadians(270), xPosYAxisLabel, yPosYAxisLabel);
            chartGraphics.setTransform(transform);

            // Draw string.
            chartGraphics.drawString(yAxisLabel, xPosYAxisLabel, yPosYAxisLabel);

            // Revert to original transform before rotation.
            chartGraphics.setTransform(originalTransform);
        }
    }
}
public class ScriptRuntime {

    /**
     * Convert the value to a boolean.
     *
     * See ECMA 9.2.
     */
    public static boolean toBoolean(Object val) {
        // Loop so that a Scriptable's default value can be re-examined after
        // conversion to a primitive (the "ECMA extension" branch below).
        for (;;) {
            if (val instanceof Boolean)
                return ((Boolean) val).booleanValue();
            if (val == null || val == Undefined.instance)
                return false;
            if (val instanceof CharSequence)
                return ((CharSequence) val).length() != 0;
            if (val instanceof Number) {
                double d = ((Number) val).doubleValue();
                // false for NaN (d != d catches it) and for both zeroes.
                return (d == d && d != 0.0);
            }
            if (val instanceof Scriptable) {
                // Objects that ask to avoid detection (e.g. document.all-style)
                // convert to false.
                if (val instanceof ScriptableObject && ((ScriptableObject) val).avoidObjectDetection()) {
                    return false;
                }
                if (Context.getContext().isVersionECMA1()) {
                    // pure ECMA: any other object is truthy
                    return true;
                }
                // ECMA extension: retry on the object's default value, which
                // must now be a non-Scriptable primitive (Symbols excepted).
                val = ((Scriptable) val).getDefaultValue(BooleanClass);
                if ((val instanceof Scriptable) && !isSymbol(val))
                    throw errorWithClassName("msg.primitive.expected", val);
                continue;
            }
            // Unknown host object: warn, then treat as truthy.
            warnAboutNonJSObject(val);
            return true;
        }
    }
}
public class ChecksumExtensions { /** * Gets the checksum from the given byte arrays with the given algorithm * @ param algorithm * the algorithm to get the checksum . This could be for instance " MD4 " , " MD5 " , * " SHA - 1 " , " SHA - 256 " , " SHA - 384 " or " SHA - 512 " . * @ param byteArrays * the array of byte arrays * @ return The checksum from the given byte arrays as a String object . * @ throws NoSuchAlgorithmException * Is thrown if the algorithm is not supported or does not exists . * { @ link java . security . MessageDigest } object . */ public static String getChecksum ( final Algorithm algorithm , final byte [ ] ... byteArrays ) throws NoSuchAlgorithmException { } }
StringBuilder sb = new StringBuilder ( ) ; for ( byte [ ] byteArray : byteArrays ) { sb . append ( getChecksum ( byteArray , algorithm . getAlgorithm ( ) ) ) ; } return sb . toString ( ) ;
public class OSHelper { /** * Write a file * @ param bytes * @ param targetFileName * @ throws OSHelperException */ public static void writeFile ( byte [ ] bytes , String targetFileName ) throws OSHelperException { } }
FileOutputStream fos ; try { fos = new FileOutputStream ( targetFileName ) ; } catch ( FileNotFoundException ex ) { throw new OSHelperException ( "Received a FileNotFoundException when trying to open target file " + targetFileName + "." , ex ) ; } try { fos . write ( bytes ) ; } catch ( IOException ex ) { throw new OSHelperException ( "Received an IOException when trying to write to target file " + targetFileName + "." , ex ) ; } finally { try { fos . close ( ) ; } catch ( IOException ex ) { throw new OSHelperException ( "Received an IOException when trying to close FileOutputStream for target file " + targetFileName + "." , ex ) ; } }
public class GetIntentResult { /** * An array of sample utterances configured for the intent . * @ param sampleUtterances * An array of sample utterances configured for the intent . */ public void setSampleUtterances ( java . util . Collection < String > sampleUtterances ) { } }
if ( sampleUtterances == null ) { this . sampleUtterances = null ; return ; } this . sampleUtterances = new java . util . ArrayList < String > ( sampleUtterances ) ;
public class ClientRegistry { /** * Gets the { @ link TextureAtlasSprite } to used for the { @ link IBlockState } . < br > * Called via ASM from { @ link BlockModelShapes # getTexture ( IBlockState ) } * @ param state the state * @ return the particle icon */ static TextureAtlasSprite getParticleIcon ( IBlockState state ) { } }
Icon icon = null ; IIconProvider provider = IComponent . getComponent ( IIconProvider . class , state . getBlock ( ) ) ; if ( provider instanceof IBlockIconProvider ) icon = ( ( IBlockIconProvider ) provider ) . getParticleIcon ( state ) ; else if ( provider != null ) icon = provider . getIcon ( ) ; return icon != null ? icon : Icon . missing ;
public class NodeTemplateClient {

    /**
     * Sets the access control policy on the specified resource. Replaces any existing policy.
     *
     * <p>Sample code:
     *
     * <pre><code>
     * try (NodeTemplateClient nodeTemplateClient = NodeTemplateClient.create()) {
     *   ProjectRegionNodeTemplateResourceName resource = ProjectRegionNodeTemplateResourceName.of("[PROJECT]", "[REGION]", "[RESOURCE]");
     *   RegionSetPolicyRequest regionSetPolicyRequestResource = RegionSetPolicyRequest.newBuilder().build();
     *   Policy response = nodeTemplateClient.setIamPolicyNodeTemplate(resource.toString(), regionSetPolicyRequestResource);
     * }
     * </code></pre>
     *
     * @param resource Name or id of the resource for this request.
     * @param regionSetPolicyRequestResource the policy payload to apply
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    @BetaApi
    public final Policy setIamPolicyNodeTemplate(String resource, RegionSetPolicyRequest regionSetPolicyRequestResource) {
        // Package the arguments into the request object and delegate to the
        // request-based overload, which performs the actual RPC.
        SetIamPolicyNodeTemplateHttpRequest request =
            SetIamPolicyNodeTemplateHttpRequest.newBuilder()
                .setResource(resource)
                .setRegionSetPolicyRequestResource(regionSetPolicyRequestResource)
                .build();
        return setIamPolicyNodeTemplate(request);
    }
}
public class RowOutputBinary {

    /**
     * Calculate the size of byte array required to store a row.
     *
     * @param data - the row data
     * @param l - number of data[] elements to include in calculation
     * @param types - array of java.sql.Types values
     * @return size of byte array
     */
    private static int getSize(Object[] data, int l, Type[] types) {
        int s = 0;

        for (int i = 0; i < l; i++) {
            Object o = data[i];

            s += 1;    // type or null

            if (o != null) {
                switch (types[i].typeCode) {

                    case Types.SQL_ALL_TYPES :
                        throw Error.runtimeError(ErrorCode.U_S0500, "RowOutputBinary");

                    // character types: 4-byte length prefix + UTF-encoded payload
                    case Types.SQL_CHAR :
                    case Types.SQL_VARCHAR :
                    case Types.VARCHAR_IGNORECASE :
                        s += 4;
                        s += StringConverter.getUTFSize((String) o);
                        break;

                    case Types.TINYINT :
                    case Types.SQL_SMALLINT :
                        s += 2;
                        break;

                    case Types.SQL_INTEGER :
                        s += 4;
                        break;

                    case Types.SQL_BIGINT :
                    case Types.SQL_REAL :
                    case Types.SQL_FLOAT :
                    case Types.SQL_DOUBLE :
                        s += 8;
                        break;

                    // 8 fixed bytes plus the variable-length unscaled value
                    case Types.SQL_NUMERIC :
                    case Types.SQL_DECIMAL :
                        s += 8;

                        BigDecimal bigdecimal = (BigDecimal) o;
                        BigInteger bigint     = JavaSystem.unscaledValue(bigdecimal);

                        s += bigint.toByteArray().length;
                        break;

                    case Types.SQL_BOOLEAN :
                        s += 1;
                        break;

                    case Types.SQL_DATE :
                        s += 8;
                        break;

                    case Types.SQL_TIME :
                        s += 8;
                        break;

                    case Types.SQL_TIME_WITH_TIME_ZONE :
                        s += 12;
                        break;

                    case Types.SQL_TIMESTAMP :
                        s += 12;
                        break;

                    case Types.SQL_TIMESTAMP_WITH_TIME_ZONE :
                        s += 16;
                        break;

                    case Types.SQL_INTERVAL_YEAR :
                    case Types.SQL_INTERVAL_YEAR_TO_MONTH :
                    case Types.SQL_INTERVAL_MONTH :
                        s += 8;
                        break;

                    case Types.SQL_INTERVAL_DAY :
                    case Types.SQL_INTERVAL_DAY_TO_HOUR :
                    case Types.SQL_INTERVAL_DAY_TO_MINUTE :
                    case Types.SQL_INTERVAL_DAY_TO_SECOND :
                    case Types.SQL_INTERVAL_HOUR :
                    case Types.SQL_INTERVAL_HOUR_TO_MINUTE :
                    case Types.SQL_INTERVAL_HOUR_TO_SECOND :
                    case Types.SQL_INTERVAL_MINUTE :
                    case Types.SQL_INTERVAL_MINUTE_TO_SECOND :
                    case Types.SQL_INTERVAL_SECOND :
                        s += 12;
                        break;

                    // binary types: 4-byte length prefix + raw bytes
                    case Types.SQL_BINARY :
                    case Types.SQL_VARBINARY :
                        s += 4;
                        s += ((BinaryData) o).length(null);
                        break;

                    case Types.SQL_BIT :
                    case Types.SQL_BIT_VARYING :
                        s += 4;
                        s += ((BinaryData) o).length(null);
                        break;

                    // LOBs: only a fixed 8-byte quantity is counted here
                    case Types.SQL_CLOB :
                    case Types.SQL_BLOB :
                        s += 8;
                        break;

                    case Types.OTHER :
                        JavaObjectData jo = (JavaObjectData) o;

                        s += 4;
                        s += jo.getBytesLength();
                        break;

                    default :
                        throw Error.runtimeError(ErrorCode.U_S0500, "RowOutputBinary");
                }
            }
        }

        return s;
    }
}
public class ByteArrayWrapper { /** * Compare this object to another ByteArrayWrapper , which must not be null . * @ param other the object to compare to . * @ return a value & lt ; 0 , 0 , or & gt ; 0 as this compares less than , equal to , or * greater than other . * @ throws ClassCastException if the other object is not a ByteArrayWrapper */ public int compareTo ( ByteArrayWrapper other ) { } }
if ( this == other ) return 0 ; int minSize = size < other . size ? size : other . size ; for ( int i = 0 ; i < minSize ; ++ i ) { if ( bytes [ i ] != other . bytes [ i ] ) { return ( bytes [ i ] & 0xFF ) - ( other . bytes [ i ] & 0xFF ) ; } } return size - other . size ;
public class StringUtils {

    /**
     * Performs a case insensitive comparison and returns true if the data
     * begins with the given sequence.
     *
     * @param data the string whose prefix is examined
     * @param seq the candidate prefix
     * @return true when data starts with seq, ignoring case
     */
    public static boolean beginsWithIgnoreCase(final String data, final String seq) {
        // regionMatches with ignoreCase=true compares the first seq.length()
        // characters in place, without allocating lower-cased copies.
        final int prefixLength = seq.length();
        return data.regionMatches(true, 0, seq, 0, prefixLength);
    }
}