signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class PoolOperations { /** * Adds a pool to the Batch account . * @ param poolId * The ID of the pool . * @ param virtualMachineSize * The size of virtual machines in the pool . See < a href = * " https : / / azure . microsoft . com / documentation / articles / virtual - machines - size - specs / " > https : / / azure . microsoft . com / documentation / articles / virtual - machines - size - specs / < / a > * for sizes . * @ param virtualMachineConfiguration * The { @ link VirtualMachineConfiguration } for the pool . * @ param targetDedicatedNodes * The desired number of dedicated compute nodes in the pool . * @ throws BatchErrorException * Exception thrown when an error response is received from the * Batch service . * @ throws IOException * Exception thrown when there is an error in * serialization / deserialization of data sent to / received from the * Batch service . */ public void createPool ( String poolId , String virtualMachineSize , VirtualMachineConfiguration virtualMachineConfiguration , int targetDedicatedNodes ) throws BatchErrorException , IOException { } }
createPool ( poolId , virtualMachineSize , virtualMachineConfiguration , targetDedicatedNodes , 0 , null ) ;
public class PathTokenizer { /** * Apply any . or . . paths in the path param . * @ param path the path * @ return simple path , containing no . or . . paths */ public static String applySpecialPaths ( String path ) throws IllegalArgumentException { } }
List < String > tokens = getTokens ( path ) ; if ( tokens == null ) { return null ; } int i = 0 ; for ( int j = 0 ; j < tokens . size ( ) ; j ++ ) { String token = tokens . get ( j ) ; if ( isCurrentToken ( token ) ) { continue ; } else if ( isReverseToken ( token ) ) { i -- ; } else { tokens . set ( i ++ , token ) ; } if ( i < 0 ) { throw VFSMessages . MESSAGES . onRootPath ( ) ; } } return getRemainingPath ( tokens , 0 , i ) ;
public class AsmUtils { /** * Utility method to load class files of later Java versions by patching them , so ASM can read them . Does nothing at the moment . */ @ SuppressForbidden @ SuppressWarnings ( "unused" ) public static ClassReader readAndPatchClass ( InputStream in ) throws IOException { } }
final byte [ ] bytecode = readStream ( in ) ; if ( false ) patchClassMajorVersion ( bytecode , Opcodes . V10 + 1 , Opcodes . V10 ) ; return new ClassReader ( bytecode ) ;
public class RegularFile { /** * Copies the last { @ code count } blocks from this file to the end of the given target file . */ void copyBlocksTo ( RegularFile target , int count ) { } }
int start = blockCount - count ; int targetEnd = target . blockCount + count ; target . expandIfNecessary ( targetEnd ) ; System . arraycopy ( this . blocks , start , target . blocks , target . blockCount , count ) ; target . blockCount = targetEnd ;
public class CmsContentNotification {

    /**
     * Appends a table showing a set of resources, and the cause of the notification.<p>
     *
     * @param htmlMsg the StringBuffer to append the html code to
     * @param notificationCauseList the list of notification causes
     * @param header the title of the resource list
     */
    private void appendResourceList(StringBuffer htmlMsg, List<CmsExtendedNotificationCause> notificationCauseList, String header) {
        // Render nothing at all when there are no causes to show.
        if (!notificationCauseList.isEmpty()) {
            // Section title row spanning the whole table.
            htmlMsg.append("<tr><td colspan=\"5\"><br/><p style=\"margin-top:20px;margin-bottom:10px;\"><b>");
            htmlMsg.append(header);
            // Column header row: resource, site, issue, plus two empty cells.
            htmlMsg.append("</b></p></td></tr><tr class=\"trow1\"><td><div style=\"padding-top:2px;padding-bottom:2px;\">");
            htmlMsg.append(m_messages.key(Messages.GUI_RESOURCE_0));
            htmlMsg.append("</div></td><td><div style=\"padding-top:2px;padding-bottom:2px;padding-left:10px;\">");
            htmlMsg.append(m_messages.key(Messages.GUI_SITE_0));
            htmlMsg.append("</div></td><td><div style=\"padding-top:2px;padding-bottom:2px;padding-left:10px;\">");
            htmlMsg.append(m_messages.key(Messages.GUI_ISSUE_0));
            htmlMsg.append("</div></td><td colspan=\"2\"/></tr>");
            Iterator<CmsExtendedNotificationCause> notificationCauses = notificationCauseList.iterator();
            // (i % 2) + 2 alternates the row-style argument between 2 and 3 for striping.
            for (int i = 0; notificationCauses.hasNext(); i++) {
                CmsExtendedNotificationCause notificationCause = notificationCauses.next();
                htmlMsg.append(buildNotificationListItem(notificationCause, (i % 2) + 2));
            }
        }
    }
}
public class Server { /** * Get MBean attribute * @ param name The bean name * @ param attrName The attribute name * @ return The data * @ exception JMException Thrown if an error occurs */ public static String getMBeanAttribute ( String name , String attrName ) throws JMException { } }
MBeanServer server = getMBeanServer ( ) ; ObjectName objName = new ObjectName ( name ) ; String value = null ; try { Object attr = server . getAttribute ( objName , attrName ) ; if ( attr != null ) value = attr . toString ( ) ; } catch ( JMException e ) { value = e . getMessage ( ) ; } return value ;
public class Locator { /** * Create an operation to create a new locator entity . * @ param accessPolicyId * id of access policy for locator * @ param assetId * id of asset for locator * @ param locatorType * locator type * @ return the operation */ public static Creator create ( String accessPolicyId , String assetId , LocatorType locatorType ) { } }
return new Creator ( accessPolicyId , assetId , locatorType ) ;
public class BootstrapMethodAttribute {

    /**
     * Writes this BootstrapMethods attribute to the output: attribute name,
     * 4-byte payload length, then the bootstrap method table.
     */
    @Override
    public void write(ByteCodeWriter out) throws IOException {
        out.writeUTF8Const(getName());
        // The payload is first written to a temp buffer so its exact byte
        // length can be emitted before the data itself.
        TempOutputStream ts = new TempOutputStream();
        // ts.openWrite();
        // WriteStream ws = new WriteStream(ts);
        ByteCodeWriter o2 = new ByteCodeWriter(ts, out.getJavaClass());
        o2.writeShort(_methods.size());
        for (int i = 0; i < _methods.size(); i++) {
            BootstrapMethod method = _methods.get(i);
            o2.writeShort(method.getMethodRef());
            o2.writeShort(method.getArgumentSize());
            for (ConstantPoolEntry entry : method.getArguments()) {
                o2.writeShort(entry.getIndex());
            }
        }
        ts.close();
        // attribute_length, then copy the buffered payload chunk by chunk.
        out.writeInt(ts.getLength());
        TempBuffer ptr = ts.getHead();
        for (; ptr != null; ptr = ptr.next()) {
            out.write(ptr.buffer(), 0, ptr.length());
        }
        ts.destroy();
    }
}
public class RtfImportMgr { /** * Performs the mapping from the original font number to the actual font * number used in the RTF document . If the color number was not * seen during import ( thus no mapping ) then 0 is returned , guaranteeing * that the color number is always valid . * @ param colorNr The color number to map . * @ return The mapped color number */ public String mapColorNr ( String colorNr ) { } }
if ( this . importColorMapping . containsKey ( colorNr ) ) { return ( String ) this . importColorMapping . get ( colorNr ) ; } else { return "0" ; }
public class VpnConnectionsInner {

    /**
     * Retrieves all vpn connections for a particular virtual wan vpn gateway.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;VpnConnectionInner&gt; object if successful.
     */
    public PagedList<VpnConnectionInner> listByVpnGatewayNext(final String nextPageLink) {
        // Fetch the first page synchronously ...
        ServiceResponse<Page<VpnConnectionInner>> response = listByVpnGatewayNextSinglePageAsync(nextPageLink).toBlocking().single();
        // ... then return a PagedList that blocks to load each subsequent page on demand.
        return new PagedList<VpnConnectionInner>(response.body()) {
            @Override
            public Page<VpnConnectionInner> nextPage(String nextPageLink) {
                return listByVpnGatewayNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }
}
public class Deadline { /** * Resolves a Future , automatically cancelling it in the event of a timeout / failure * @ param < T > * @ param future * @ return * @ throws RuntimeException */ public < T > T resolveFuture ( final Future < T > future ) throws RuntimeException { } }
return resolveFuture ( future , true ) ;
public class BranchLinkData { /** * < b > A string value that represents the stage of the user in the app . eg : * " level1 " , " logged _ in " , etc . < / b > * @ param stage A { @ link String } value specifying the stage . * @ throws JSONException The parameter value must be in valid JSON format , or a * { @ link JSONException } will be thrown . */ public void putStage ( String stage ) throws JSONException { } }
if ( stage != null ) { this . stage = stage ; this . put ( Defines . LinkParam . Stage . getKey ( ) , stage ) ; }
public class ChronoHistory {

    /**
     * Converts the given ISO-8601 (gregorian) date to a historic date.
     *
     * @param date ISO-8601-date (gregorian)
     * @return historic calendar date
     * @throws IllegalArgumentException if given date is out of supported range
     * @since 3.0
     */
    public HistoricDate convert(PlainDate date) {
        long mjd = date.get(EpochDays.MODIFIED_JULIAN_DATE);
        HistoricDate hd = null;
        // Walk the cut-over events from latest to earliest; use the algorithm of
        // the first event whose start lies at or before the date.
        for (int i = this.events.size() - 1; i >= 0; i--) {
            CutOverEvent event = this.events.get(i);
            if (mjd >= event.start) {
                hd = event.algorithm.fromMJD(mjd);
                break;
            }
        }
        // Dates before every cut-over fall back to the Julian algorithm.
        if (hd == null) {
            hd = this.getJulianAlgorithm().fromMJD(mjd);
        }
        // Re-express the year in the preferred era when it differs from the computed one.
        HistoricEra era = this.eraPreference.getPreferredEra(hd, date);
        if (era != hd.getEra()) {
            int yoe = era.yearOfEra(hd.getEra(), hd.getYearOfEra());
            hd = HistoricDate.of(era, yoe, hd.getMonth(), hd.getDayOfMonth());
        }
        if (this.isOutOfRange(hd)) {
            throw new IllegalArgumentException("Out of supported range: " + hd);
        }
        return hd;
    }
}
public class RslAttributes { /** * Adds a new variable definition to the specified variable definitions * attribute . * @ param attribute the variable definitions attribute - rsl _ subsititution . * @ param varName the variable name to add . * @ param value the value of the variable to add . */ public void addVariable ( String attribute , String varName , String value ) { } }
Bindings binds = rslTree . getBindings ( attribute ) ; if ( binds == null ) { binds = new Bindings ( attribute ) ; rslTree . put ( binds ) ; } binds . add ( new Binding ( varName , value ) ) ;
public class ExtensionManager {

    /**
     * Register REST API routes.
     * Use reflection to find all classes that inherit from {@link water.api.AbstractRegister}
     * and call the register() method for each. May be invoked at most once;
     * a second call fails via H2O.fail.
     */
    public void registerRestApiExtensions() {
        if (restApiExtensionsRegistered) {
            throw H2O.fail("APIs already registered");
        }
        // Log core extension registrations here so the message is grouped in the right spot.
        for (AbstractH2OExtension e : getCoreExtensions()) {
            e.printInitialized();
        }
        Log.info("Registered " + coreExtensions.size() + " core extensions in: " + registerCoreExtensionsMillis + "ms");
        Log.info("Registered H2O core extensions: " + Arrays.toString(getCoreExtensionNames()));
        if (listenerExtensions.size() > 0) {
            Log.info("Registered: " + listenerExtensions.size() + " listener extensions in: " + registerListenerExtensionsMillis + "ms");
            Log.info("Registered Listeners extensions: " + Arrays.toString(getListenerExtensionNames()));
        }
        if (authExtensions.size() > 0) {
            Log.info("Registered: " + authExtensions.size() + " auth extensions in: " + registerAuthExtensionsMillis + "ms");
            Log.info("Registered Auth extensions: " + Arrays.toString(getAuthExtensionNames()));
        }
        long before = System.currentTimeMillis();
        RequestServer.DummyRestApiContext dummyRestApiContext = new RequestServer.DummyRestApiContext();
        // REST API extensions are discovered through the ServiceLoader mechanism.
        ServiceLoader<RestApiExtension> restApiExtensionLoader = ServiceLoader.load(RestApiExtension.class);
        for (RestApiExtension r : restApiExtensionLoader) {
            try {
                if (isEnabled(r)) {
                    r.registerEndPoints(dummyRestApiContext);
                    r.registerSchemas(dummyRestApiContext);
                    restApiExtensions.put(r.getName(), r);
                }
            } catch (Exception e) {
                // A failing extension must not prevent the remaining ones from registering.
                Log.info("Cannot register extension: " + r + ". Skipping it...");
            }
        }
        restApiExtensionsRegistered = true;
        long registerApisMillis = System.currentTimeMillis() - before;
        Log.info("Registered: " + RequestServer.numRoutes() + " REST APIs in: " + registerApisMillis + "ms");
        Log.info("Registered REST API extensions: " + Arrays.toString(getRestApiExtensionNames()));
        // Register all schemas
        SchemaServer.registerAllSchemasIfNecessary(dummyRestApiContext.getAllSchemas());
    }
}
public class SecureUtil { /** * 读取KeyStore文件 < br > * KeyStore文件用于数字证书的密钥对保存 < br > * see : http : / / snowolf . iteye . com / blog / 391931 * @ param type 类型 * @ param in { @ link InputStream } 如果想从文件读取 . keystore文件 , 使用 { @ link FileUtil # getInputStream ( java . io . File ) } 读取 * @ param password 密码 * @ return { @ link KeyStore } */ public static KeyStore readKeyStore ( String type , InputStream in , char [ ] password ) { } }
return KeyUtil . readKeyStore ( type , in , password ) ;
public class WarningsProjectAction { /** * Creates the build history . * @ return build history */ @ Override protected BuildHistory createBuildHistory ( ) { } }
AbstractBuild < ? , ? > lastFinishedBuild = getLastFinishedBuild ( ) ; if ( lastFinishedBuild == null ) { return new NullBuildHistory ( ) ; } else { return createHistory ( lastFinishedBuild ) ; }
public class MathBindings { /** * Binding for { @ link java . lang . Math # decrementExact ( int ) } * @ param a the value to decrement * @ return the result * @ throws ArithmeticException if the result overflows an int */ public static IntegerBinding decrementExact ( final ObservableIntegerValue a ) { } }
return createIntegerBinding ( ( ) -> Math . decrementExact ( a . get ( ) ) , a ) ;
public class DefaultGroovyMethods { /** * A variant of collectEntries for Iterators using the identity closure as the transform . * @ param self an Iterator * @ return a Map of the transformed entries * @ see # collectEntries ( Iterable ) * @ since 1.8.7 */ public static < K , V > Map < K , V > collectEntries ( Iterator < ? > self ) { } }
return collectEntries ( self , Closure . IDENTITY ) ;
public class FsCleanableHelper {

    /**
     * Delete a single {@link FileSystemDatasetVersion}. After deletion, all the parent
     * {@link Path}s are added to <code>possiblyEmptyDirectories</code>. Caller needs to call
     * {@link #cleanEmptyDirectories(Set, FileSystemDataset)} to delete empty parent directories if any.
     */
    public void clean(final FileSystemDatasetVersion versionToDelete, final Set<Path> possiblyEmptyDirectories) throws IOException {
        log.info("Deleting dataset version " + versionToDelete);
        Set<Path> pathsToDelete = versionToDelete.getPaths();
        log.info("Deleting paths: " + Arrays.toString(pathsToDelete.toArray()));
        boolean deletedAllPaths = true;
        for (Path path : pathsToDelete) {
            // Already-missing paths are logged but not treated as failures.
            if (!this.fs.exists(path)) {
                log.info(String.format("Path %s in dataset version %s does not exist", path, versionToDelete));
                continue;
            }
            // Deletion goes through the trash, optionally impersonating the path owner.
            boolean successfullyDeleted = deleteAsOwner ? trash.moveToTrashAsOwner(path) : trash.moveToTrash(path);
            if (successfullyDeleted) {
                // The parent may now be empty; record it for the caller's cleanup pass.
                possiblyEmptyDirectories.add(path.getParent());
            } else {
                log.error("Failed to delete path " + path + " in dataset version " + versionToDelete);
                deletedAllPaths = false;
            }
        }
        // Partial failure is logged but intentionally not rethrown.
        if (!deletedAllPaths) {
            log.error("Failed to delete some paths in dataset version " + versionToDelete);
        }
    }
}
public class DescribeNetworkInterfacesRequest { /** * One or more network interface IDs . * Default : Describes all your network interfaces . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setNetworkInterfaceIds ( java . util . Collection ) } or { @ link # withNetworkInterfaceIds ( java . util . Collection ) } * if you want to override the existing values . * @ param networkInterfaceIds * One or more network interface IDs . < / p > * Default : Describes all your network interfaces . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeNetworkInterfacesRequest withNetworkInterfaceIds ( String ... networkInterfaceIds ) { } }
if ( this . networkInterfaceIds == null ) { setNetworkInterfaceIds ( new com . amazonaws . internal . SdkInternalList < String > ( networkInterfaceIds . length ) ) ; } for ( String ele : networkInterfaceIds ) { this . networkInterfaceIds . add ( ele ) ; } return this ;
public class DBColumnPropertySheet {

    /**
     * Focus-lost handler for the Java field name text field (NetBeans GUI builder event).
     * GEN-LAST:event_tfJavaFieldNameKeyTyped
     */
    private void tfJavaFieldNameFocusLost(java.awt.event.FocusEvent evt) // GEN-FIRST:event_tfJavaFieldNameFocusLost
    {
        // GEN-HEADEREND:event_tfJavaFieldNameFocusLost
        // Commit the new value to the column if the focus is lost
        aColumn.setJavaFieldName(tfJavaFieldName.getText());
    }
}
public class DdiRootController { /** * If the action has a maintenance schedule defined but is no longer valid , * cancel the action . * @ param action * is the { @ link Action } to check . */ private void checkAndCancelExpiredAction ( final Action action ) { } }
if ( action != null && action . hasMaintenanceSchedule ( ) && action . isMaintenanceScheduleLapsed ( ) ) { try { controllerManagement . cancelAction ( action . getId ( ) ) ; } catch ( final CancelActionNotAllowedException e ) { LOG . info ( "Cancel action not allowed exception :{}" , e ) ; } }
public class EtcdNettyConfig { /** * Set a custom event loop group . For use within existing netty architectures * @ param eventLoopGroup eventLoopGroup to set . * @ param managed whether event loop group will be closed when etcd client close , true represent yes * @ return itself for chaining . */ public EtcdNettyConfig setEventLoopGroup ( EventLoopGroup eventLoopGroup , boolean managed ) { } }
if ( this . eventLoopGroup != null && this . managedEventLoopGroup ) { // if i manage it , close the old when new one come this . eventLoopGroup . shutdownGracefully ( ) ; } this . eventLoopGroup = eventLoopGroup ; this . managedEventLoopGroup = managed ; return this ;
public class RFaxFaxClientSpi {

    /**
     * This function initializes the fax client SPI.
     * Reads the mandatory serial port name and the fax class from the SPI configuration.
     */
    @Override
    protected void initializeImpl() {
        // get port name
        this.portName = this.getConfigurationValue(FaxClientSpiConfigurationConstants.PORT_NAME_PROPERTY_KEY);
        if (this.portName == null) {
            throw new FaxException("Port name not defined in fax4j.properties. Property: " + FaxClientSpiConfigurationConstants.PORT_NAME_PROPERTY_KEY);
        }
        // get fax class
        // NOTE(review): a missing or malformed fax-class property makes parseInt throw
        // (NPE / NumberFormatException) — presumably guaranteed by configuration; confirm.
        this.faxClass = Integer.parseInt(this.getConfigurationValue(FaxClientSpiConfigurationConstants.FAX_CLASS_PROPERTY_KEY));
    }
}
public class LobEngineTrigger {

    /**
     * Captures the user-supplied Lob values and swaps fresh locators into the
     * storable before insert.
     *
     * @return the original user Lob values, for use after the insert
     */
    @Override
    public Object beforeInsert(S storable) throws PersistException {
        // Capture user lob values for later and replace non-null ones with new locators.
        final int count = mLobProperties.length;
        final Object[] userLobs = new Object[count];
        for (int i = 0; i < count; i++) {
            final LobProperty<Lob> property = mLobProperties[i];
            final Object original = storable.getPropertyValue(property.mName);
            userLobs[i] = original;
            if (original != null) {
                storable.setPropertyValue(property.mName, property.createNewLob(mBlockSize));
            }
        }
        return userLobs;
    }
}
public class FormatUtils { /** * Formats the given { @ link String } of text . * @ param textPattern { @ link String } text pattern to format . * @ param args array of { @ link Object } arguments to apply to the text pattern . * @ return a formatted { @ link String } of text with the arguments applied to the text pattern . * @ see # messageFormat ( String , Object . . . ) * @ see # stringFormat ( String , Object . . . ) * @ see java . lang . String */ public static String format ( String textPattern , Object ... args ) { } }
return messageFormat ( stringFormat ( textPattern , args ) , args ) ;
public class Log {

    /**
     * Report a warning that cannot be suppressed.
     *
     * @param pos The source position at which to report the warning.
     * @param key The key for the localized warning message.
     * @param args Fields of the warning message.
     */
    public void strictWarning(DiagnosticPosition pos, String key, Object... args) {
        // Emit unconditionally (no lint/suppression check) and bump the warning count.
        writeDiagnostic(diags.warning(null, source, pos, key, args));
        nwarnings++;
    }
}
public class ByteBufferUtil { /** * Compare two ByteBuffer at specified offsets for length . * Compares the non equal bytes as unsigned . * @ param bytes1 First byte buffer to compare . * @ param offset1 Position to start the comparison at in the first array . * @ param bytes2 Second byte buffer to compare . * @ param offset2 Position to start the comparison at in the second array . * @ param length How many bytes to compare ? * @ return - 1 if byte1 is less than byte2 , 1 if byte2 is less than byte1 or 0 if equal . */ public static int compareSubArrays ( ByteBuffer bytes1 , int offset1 , ByteBuffer bytes2 , int offset2 , int length ) { } }
if ( bytes1 == null ) return bytes2 == null ? 0 : - 1 ; if ( bytes2 == null ) return 1 ; assert bytes1 . limit ( ) >= offset1 + length : "The first byte array isn't long enough for the specified offset and length." ; assert bytes2 . limit ( ) >= offset2 + length : "The second byte array isn't long enough for the specified offset and length." ; for ( int i = 0 ; i < length ; i ++ ) { byte byte1 = bytes1 . get ( offset1 + i ) ; byte byte2 = bytes2 . get ( offset2 + i ) ; if ( byte1 == byte2 ) continue ; // compare non - equal bytes as unsigned return ( byte1 & 0xFF ) < ( byte2 & 0xFF ) ? - 1 : 1 ; } return 0 ;
public class WebApp { /** * Add all entry paths from the Container into the Set */ private void addAllEntries ( Set s , com . ibm . wsspi . adaptable . module . Container dir ) throws UnableToAdaptException { } }
for ( Entry entry : dir ) { String path = entry . getPath ( ) ; com . ibm . wsspi . adaptable . module . Container possibleContainer = entry . adapt ( com . ibm . wsspi . adaptable . module . Container . class ) ; // If this container appears to be a directory then we need to add / to the path // If this container is a nested archive , then we add it as - is . if ( possibleContainer != null && ! possibleContainer . isRoot ( ) ) { path = path + "/" ; } s . add ( path ) ; }
public class Color {

    /**
     * Set the color with arithmetic RGB values.
     *
     * @param red red float color inclusively between 0.0 and 1.0
     * @param green green float color inclusively between 0.0 and 1.0
     * @param blue blue float color inclusively between 0.0 and 1.0
     */
    public void setColor(float red, float green, float blue) {
        // Delegates per channel; the setters presumably enforce the 0.0-1.0 range — confirm.
        setRed(red);
        setGreen(green);
        setBlue(blue);
    }
}
public class WSEJBProxy {

    /**
     * Emits bytecode that packs the current method's arguments into a new
     * Object[] local variable, boxing primitives into their wrapper types
     * via NEW/DUP/INVOKESPECIAL. (d507967)
     *
     * @param mg the ASM method generator to emit into
     * @param argTypes the argument types of the method being proxied
     * @return the index of the local variable holding the Object[] of arguments
     */
    private static int createParameterArray(GeneratorAdapter mg, Type[] argTypes) {
        // Object[] args = new Object[# of args];
        int args = mg.newLocal(TYPE_Object_ARRAY);
        mg.push(argTypes.length);
        mg.visitTypeInsn(ANEWARRAY, "java/lang/Object");
        mg.storeLocal(args);
        // args[i] = "parameter"; -> for each parameter
        for (int i = 0; i < argTypes.length; i++) {
            mg.loadLocal(args);
            mg.push(i);
            // Convert primities to objects to put in 'args' array.
            switch (argTypes[i].getSort()) {
                case Type.BOOLEAN:
                    mg.visitTypeInsn(NEW, "java/lang/Boolean");
                    mg.visitInsn(DUP);
                    mg.loadArg(i); // for non-static, arg 0 = "this"
                    mg.visitMethodInsn(INVOKESPECIAL, "java/lang/Boolean", "<init>", "(Z)V");
                    break;
                case Type.CHAR:
                    mg.visitTypeInsn(NEW, "java/lang/Character");
                    mg.visitInsn(DUP);
                    mg.loadArg(i); // for non-static, arg 0 = "this"
                    mg.visitMethodInsn(INVOKESPECIAL, "java/lang/Character", "<init>", "(C)V");
                    break;
                case Type.BYTE:
                    mg.visitTypeInsn(NEW, "java/lang/Byte");
                    mg.visitInsn(DUP);
                    mg.loadArg(i); // for non-static, arg 0 = "this"
                    mg.visitMethodInsn(INVOKESPECIAL, "java/lang/Byte", "<init>", "(B)V");
                    break;
                case Type.SHORT:
                    mg.visitTypeInsn(NEW, "java/lang/Short");
                    mg.visitInsn(DUP);
                    mg.loadArg(i); // for non-static, arg 0 = "this"
                    mg.visitMethodInsn(INVOKESPECIAL, "java/lang/Short", "<init>", "(S)V");
                    break;
                case Type.INT:
                    mg.visitTypeInsn(NEW, "java/lang/Integer");
                    mg.visitInsn(DUP);
                    mg.loadArg(i); // for non-static, arg 0 = "this"
                    mg.visitMethodInsn(INVOKESPECIAL, "java/lang/Integer", "<init>", "(I)V");
                    break;
                case Type.FLOAT:
                    mg.visitTypeInsn(NEW, "java/lang/Float");
                    mg.visitInsn(DUP);
                    mg.loadArg(i); // for non-static, arg 0 = "this"
                    mg.visitMethodInsn(INVOKESPECIAL, "java/lang/Float", "<init>", "(F)V");
                    break;
                case Type.LONG:
                    mg.visitTypeInsn(NEW, "java/lang/Long");
                    mg.visitInsn(DUP);
                    mg.loadArg(i); // for non-static, arg 0 = "this"
                    mg.visitMethodInsn(INVOKESPECIAL, "java/lang/Long", "<init>", "(J)V");
                    break;
                case Type.DOUBLE:
                    mg.visitTypeInsn(NEW, "java/lang/Double");
                    mg.visitInsn(DUP);
                    mg.loadArg(i); // for non-static, arg 0 = "this"
                    mg.visitMethodInsn(INVOKESPECIAL, "java/lang/Double", "<init>", "(D)V");
                    break;
                // ARRAY & OBJECT - no need to copy, just load the arg
                default:
                    mg.loadArg(i); // for non-static, arg 0 = "this"
                    break;
            }
            mg.visitInsn(AASTORE);
        }
        return args;
    }
}
public class ConfigurationMetadataClassSourceLocator { /** * Build type source path string . * @ param sourcePath the source path * @ param type the type * @ return the string */ public static String buildTypeSourcePath ( final String sourcePath , final String type ) { } }
val newName = type . replace ( "." , File . separator ) ; return sourcePath + "/src/main/java/" + newName + ".java" ;
public class CommerceRegionUtil { /** * Returns the commerce region where uuid = & # 63 ; and groupId = & # 63 ; or returns < code > null < / code > if it could not be found , optionally using the finder cache . * @ param uuid the uuid * @ param groupId the group ID * @ param retrieveFromCache whether to retrieve from the finder cache * @ return the matching commerce region , or < code > null < / code > if a matching commerce region could not be found */ public static CommerceRegion fetchByUUID_G ( String uuid , long groupId , boolean retrieveFromCache ) { } }
return getPersistence ( ) . fetchByUUID_G ( uuid , groupId , retrieveFromCache ) ;
public class CreateAssociationBatchResult { /** * Information about the associations that failed . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setFailed ( java . util . Collection ) } or { @ link # withFailed ( java . util . Collection ) } if you want to override the * existing values . * @ param failed * Information about the associations that failed . * @ return Returns a reference to this object so that method calls can be chained together . */ public CreateAssociationBatchResult withFailed ( FailedCreateAssociation ... failed ) { } }
if ( this . failed == null ) { setFailed ( new com . amazonaws . internal . SdkInternalList < FailedCreateAssociation > ( failed . length ) ) ; } for ( FailedCreateAssociation ele : failed ) { this . failed . add ( ele ) ; } return this ;
public class WiredCache { /** * We need to deal with possible null values and exceptions . This is * a simple placeholder implementation that covers it all by working * on the entry . */ @ Override public Map < K , V > peekAll ( final Iterable < ? extends K > keys ) { } }
Map < K , CacheEntry < K , V > > map = new HashMap < K , CacheEntry < K , V > > ( ) ; for ( K k : keys ) { CacheEntry < K , V > e = execute ( k , SPEC . peekEntry ( k ) ) ; if ( e != null ) { map . put ( k , e ) ; } } return heapCache . convertCacheEntry2ValueMap ( map ) ;
public class OMMapManagerNew { /** * Removes the file . */ public void removeFile ( final OFileMMap iFile ) { } }
lockManager . acquireLock ( Thread . currentThread ( ) , iFile , OLockManager . LOCK . EXCLUSIVE ) ; try { mapEntrySearchInfo . remove ( iFile ) ; final OMMapBufferEntry [ ] entries = bufferPoolPerFile . remove ( iFile ) ; removeFileEntries ( entries ) ; } finally { lockManager . releaseLock ( Thread . currentThread ( ) , iFile , OLockManager . LOCK . EXCLUSIVE ) ; }
public class CmsUpdateBean { /** * Updates the JDBC driver class names . < p > * Needs to be executed before any database access . < p > */ public void updateDBDriverClassName ( ) { } }
Set < String > keys = new HashSet < String > ( ) ; // replace MySQL JDBC driver class name CmsParameterConfiguration properties = getProperties ( ) ; for ( Entry < String , String > propertyEntry : properties . entrySet ( ) ) { if ( MYSQL_DRIVER_CLASS_OLD . equals ( propertyEntry . getValue ( ) ) ) { keys . add ( propertyEntry . getKey ( ) ) ; } } for ( String key : keys ) { properties . put ( key , MYSQL_DRIVER_CLASS_NEW ) ; } if ( ! keys . isEmpty ( ) ) { saveProperties ( properties , CmsSystemInfo . FILE_PROPERTIES , false , keys ) ; }
public class RemoteBrowserReceiver { /** * / * ( non - Javadoc ) * @ see com . ibm . ws . sib . processor . runtime . SIMPRemoteBrowserReceiverControllable # getExpectedSequenceNumber ( ) */ public long getExpectedSequenceNumber ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getExpectedSequenceNumber" ) ; long expectedSeqNumber = aoBrowserSession . getExpectedSequenceNumber ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getExpectedSequenceNumber" , new Long ( expectedSeqNumber ) ) ; return expectedSeqNumber ;
public class FoldersApi { /** * Gets a list of the envelopes in the specified folder . * Retrieves a list of the envelopes in the specified folder . You can narrow the query by specifying search criteria in the query string parameters . * @ param accountId The external account number ( int ) or account ID Guid . ( required ) * @ param folderId The ID of the folder being accessed . ( required ) * @ return FolderItemsResponse */ public FolderItemsResponse listItems ( String accountId , String folderId ) throws ApiException { } }
return listItems ( accountId , folderId , null ) ;
public class sslcipher { /** * Use this API to delete sslcipher of given name . */ public static base_response delete ( nitro_service client , String ciphergroupname ) throws Exception { } }
sslcipher deleteresource = new sslcipher ( ) ; deleteresource . ciphergroupname = ciphergroupname ; return deleteresource . delete_resource ( client ) ;
public class StmtUtil {

    /**
     * Cancel a statement identifiable by a request id.
     *
     * @param stmtInput input statement
     * @throws SFException if there is an internal exception
     * @throws SnowflakeSQLException if failed to cancel the statement
     */
    public static void cancel(StmtInput stmtInput) throws SFException, SnowflakeSQLException {
        HttpPost httpRequest = null;
        // Validate all required fields of the statement input up front.
        AssertUtil.assertTrue(stmtInput.serverUrl != null, "Missing server url for statement execution");
        AssertUtil.assertTrue(stmtInput.sql != null, "Missing sql for statement execution");
        AssertUtil.assertTrue(stmtInput.mediaType != null, "Missing media type for statement execution");
        AssertUtil.assertTrue(stmtInput.requestId != null, "Missing request id for statement execution");
        AssertUtil.assertTrue(stmtInput.sessionToken != null, "Missing session token for statement execution");
        try {
            URIBuilder uriBuilder = new URIBuilder(stmtInput.serverUrl);
            logger.debug("Aborting query: {}", stmtInput.sql);
            uriBuilder.setPath(SF_PATH_ABORT_REQUEST_V1);
            // A fresh request id identifies the abort request itself; the id of the
            // statement being cancelled travels in the JSON body below.
            uriBuilder.addParameter(SF_QUERY_REQUEST_ID, UUID.randomUUID().toString());
            httpRequest = new HttpPost(uriBuilder.build());
            /*
             * The JSON input has two fields: sqlText and requestId
             */
            Map sqlJsonBody = new HashMap<String, Object>();
            sqlJsonBody.put("sqlText", stmtInput.sql);
            sqlJsonBody.put("requestId", stmtInput.requestId);
            String json = mapper.writeValueAsString(sqlJsonBody);
            logger.debug("JSON for cancel request: {}", json);
            StringEntity input = new StringEntity(json, Charset.forName("UTF-8"));
            input.setContentType("application/json");
            httpRequest.setEntity(input);
            httpRequest.addHeader("accept", stmtInput.mediaType);
            httpRequest.setHeader(SF_HEADER_AUTHORIZATION, SF_HEADER_SNOWFLAKE_AUTHTYPE + " " + SF_HEADER_TOKEN_TAG + "=\"" + stmtInput.sessionToken + "\"");
            setServiceNameHeader(stmtInput, httpRequest);
            String jsonString = HttpUtil.executeRequest(httpRequest, SF_CANCELING_RETRY_TIMEOUT_IN_MILLIS, 0, null);
            // trace the response if requested
            logger.debug("Json response: {}", jsonString);
            JsonNode rootNode = null;
            rootNode = mapper.readTree(jsonString);
            // raise server side error as an exception if any
            SnowflakeUtil.checkErrorAndThrowException(rootNode);
        } catch (URISyntaxException | IOException ex) {
            logger.error("Exception encountered when canceling " + httpRequest, ex);
            // raise internal exception if this is not a snowflake exception
            throw new SFException(ex, ErrorCode.INTERNAL_ERROR, ex.getLocalizedMessage());
        }
    }
}
public class SeleniumVersionExtractor { /** * Returns current selenium version from JAR set in classpath . * @ return Version of Selenium . */ public static String fromClassPath ( ) { } }
Set < String > versions = new HashSet < > ( ) ; try { ClassLoader classLoader = Thread . currentThread ( ) . getContextClassLoader ( ) ; Enumeration < URL > manifests = classLoader . getResources ( "META-INF/MANIFEST.MF" ) ; while ( manifests . hasMoreElements ( ) ) { URL manifestURL = manifests . nextElement ( ) ; try ( InputStream is = manifestURL . openStream ( ) ) { Manifest manifest = new Manifest ( ) ; manifest . read ( is ) ; Attributes buildInfo = manifest . getAttributes ( "Build-Info" ) ; if ( buildInfo != null ) { if ( buildInfo . getValue ( "Selenium-Version" ) != null ) { versions . add ( buildInfo . getValue ( "Selenium-Version" ) ) ; } else { // might be in build - info part if ( manifest . getEntries ( ) != null ) { if ( manifest . getEntries ( ) . containsKey ( "Build-Info" ) ) { final Attributes attributes = manifest . getEntries ( ) . get ( "Build-Info" ) ; if ( attributes . getValue ( "Selenium-Version" ) != null ) { versions . add ( attributes . getValue ( "Selenium-Version" ) ) ; } } } } } } } } catch ( Exception e ) { logger . log ( Level . WARNING , "Exception {0} occurred while resolving selenium version and latest image is going to be used." , e . getMessage ( ) ) ; return SELENIUM_VERSION ; } if ( versions . isEmpty ( ) ) { logger . log ( Level . INFO , "No version of Selenium found in classpath. Using latest image." ) ; return SELENIUM_VERSION ; } String foundVersion = versions . iterator ( ) . next ( ) ; if ( versions . size ( ) > 1 ) { logger . log ( Level . WARNING , "Multiple versions of Selenium found in classpath. Using the first one found {0}." , foundVersion ) ; } return foundVersion ;
public class AbstractSBTCompileMojo { /** * " artifactGAVs " in format " groupId1 : artifactId1 : version1 groupId2 : artifactId2 : version2" */ private void resolveArtifacts ( List < Artifact > artifacts , String artifactGAVs ) throws ArtifactNotFoundException , ArtifactResolutionException { } }
if ( artifactGAVs != null && artifactGAVs . trim ( ) . length ( ) > 0 ) { String [ ] scalacPluginsGAVs = artifactGAVs . trim ( ) . split ( " " ) ; for ( String scalacPluginGAV : scalacPluginsGAVs ) { String [ ] gav = scalacPluginGAV . split ( ":" ) ; String groupId = gav [ 0 ] ; String artifactId = gav [ 1 ] ; String version = gav [ 2 ] ; Artifact scalacPluginArtifact = getResolvedArtifact ( groupId , artifactId , version ) ; if ( scalacPluginArtifact != null ) { artifacts . add ( scalacPluginArtifact ) ; } } }
public class AdGroupAd { /** * Gets the policySummary value for this AdGroupAd . * @ return policySummary * Summary of policy findings for this ad . * < span class = " constraint Selectable " > This field can * be selected using the value " PolicySummary " . < / span > * < span class = " constraint ReadOnly " > This field is read * only and will be ignored when sent to the API . < / span > */ public com . google . api . ads . adwords . axis . v201809 . cm . AdGroupAdPolicySummary getPolicySummary ( ) { } }
return policySummary ;
public class BoundedVersionRange {

    /**
     * Checks whether the supplied version lies within this bounded range.
     * <p>
     * Accepts {@link SemanticVersion} instances (which also covers
     * FlexibleSemanticVersion subclasses).
     *
     * @param version the version to test
     * @return {@code true} if the version satisfies the range expression
     * @throws IllegalArgumentException if the version is not a semantic version
     * @see net.ossindex.version.IVersionRange#contains(net.ossindex.version.IVersion)
     */
    @Override
    public boolean contains(IVersion version) {
        if (!(version instanceof SemanticVersion)) {
            throw new IllegalArgumentException("Semantic ranges expect semantic versions");
        }
        SemanticVersion semanticVersion = (SemanticVersion) version;
        return expression.interpret(semanticVersion.getVersionImpl());
    }
}
public class ControllerHandler { /** * Init extractors from controller method . */ protected void initExtractors ( ) { } }
Parameter [ ] parameters = controllerMethod . getParameters ( ) ; extractors = new MethodParameterExtractor [ parameters . length ] ; for ( int i = 0 ; i < parameters . length ; i ++ ) { MethodParameter parameter = new MethodParameter ( controllerMethod , i ) ; MethodParameterExtractor extractor = application . getExtractors ( ) . stream ( ) . filter ( e -> e . isApplicable ( parameter ) ) . findFirst ( ) . orElse ( null ) ; if ( extractor == null ) { throw new PippoRuntimeException ( "Method '{}' parameter {} of type '{}' does not specify a extractor" , LangUtils . toString ( controllerMethod ) , i + 1 , parameter . getParameterType ( ) ) ; } extractors [ i ] = extractor ; }
public class GrafeasV1Beta1Client { /** * Updates the specified note . * < p > Sample code : * < pre > < code > * try ( GrafeasV1Beta1Client grafeasV1Beta1Client = GrafeasV1Beta1Client . create ( ) ) { * NoteName name = NoteName . of ( " [ PROJECT ] " , " [ NOTE ] " ) ; * Note note = Note . newBuilder ( ) . build ( ) ; * FieldMask updateMask = FieldMask . newBuilder ( ) . build ( ) ; * Note response = grafeasV1Beta1Client . updateNote ( name , note , updateMask ) ; * < / code > < / pre > * @ param name The name of the note in the form of ` projects / [ PROVIDER _ ID ] / notes / [ NOTE _ ID ] ` . * @ param note The updated note . * @ param updateMask The fields to update . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final Note updateNote ( NoteName name , Note note , FieldMask updateMask ) { } }
UpdateNoteRequest request = UpdateNoteRequest . newBuilder ( ) . setName ( name == null ? null : name . toString ( ) ) . setNote ( note ) . setUpdateMask ( updateMask ) . build ( ) ; return updateNote ( request ) ;
public class Command { /** * execute commands in backtrack order */ public boolean backtrack ( Map < String , Command > references ) throws Exception { } }
for ( int i = 0 ; i < arguments . size ( ) ; ++ i ) { Command arg = arguments . elementAt ( i ) ; arg . backtrack ( references ) ; } for ( int i = 0 ; i < operations . size ( ) ; ++ i ) { Command opr = operations . elementAt ( i ) ; opr . backtrack ( references ) ; } if ( status == Command . CHILDREN_FILTERED ) { status = doRun ( references ) ; } if ( status == Command . COMMAND_EXECUTED ) { status = doAfterRun ( references ) ; return ( getStatus ( ) == Command . CHILDREN_PROCESSED ) ; } return false ;
public class BDIImpl {

    /**
     * <!-- begin-user-doc -->
     * Returns the value of the feature identified by {@code featureID}, delegating
     * to the EMF superclass for any feature not declared on BDI.
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case AfplibPackage.BDI__INDX_NAME:
                return getIndxName();
            case AfplibPackage.BDI__TRIPLETS:
                return getTriplets();
        }
        // Feature not handled here: let the generated superclass resolve it.
        return super.eGet(featureID, resolve, coreType);
    }
}
public class VFSStoreResource { /** * Sets the current transaction timeout value for this XAResource instance . * @ param _ seconds number of seconds * @ return always < i > true < / i > */ @ Override public boolean setTransactionTimeout ( final int _seconds ) { } }
if ( VFSStoreResource . LOG . isDebugEnabled ( ) ) { VFSStoreResource . LOG . debug ( "setTransactionTimeout (seconds = " + _seconds + ")" ) ; } return true ;
public class MultiIndex { /** * Recursively creates an index starting with the NodeState * < code > node < / code > . * @ param tasks * the queue of existing indexing tasks * @ param node * the current NodeState . * @ param stateMgr * the shared item state manager . * @ param count * the number of nodes already indexed . * @ throws IOException * if an error occurs while writing to the index . * @ throws RepositoryException * if any other error occurs * @ throws InterruptedException * if the task has been interrupted */ private void createIndex ( final Queue < Callable < Void > > tasks , final NodeData node , final ItemDataConsumer stateMgr , final AtomicLong count , final AtomicLong processed ) throws IOException , RepositoryException , InterruptedException { } }
processed . incrementAndGet ( ) ; if ( stopped . get ( ) || Thread . interrupted ( ) ) { throw new InterruptedException ( ) ; } if ( indexingTree . isExcluded ( node ) ) { return ; } executeAndLog ( new AddNode ( getTransactionId ( ) , node . getIdentifier ( ) , true ) ) ; if ( count . incrementAndGet ( ) % 1000 == 0 ) { if ( nodesCount == null ) { LOG . info ( "indexing... {} ({})" , node . getQPath ( ) . getAsString ( ) , new Long ( count . get ( ) ) ) ; } else { DecimalFormat format = new DecimalFormat ( "###.#" ) ; LOG . info ( "indexing... {} ({}%)" , node . getQPath ( ) . getAsString ( ) , format . format ( Math . min ( 100d * processed . get ( ) / nodesCount . get ( ) , 100 ) ) ) ; } } synchronized ( this ) { checkVolatileCommit ( ) ; } List < NodeData > children = null ; try { children = stateMgr . getChildNodesData ( node ) ; } catch ( RepositoryException e ) { LOG . error ( "Error indexing subtree " + node . getQPath ( ) . getAsString ( ) + ". Check JCR consistency. " + e . getMessage ( ) , e ) ; return ; } for ( final NodeData nodeData : children ) { Callable < Void > task = new Callable < Void > ( ) { public Void call ( ) throws Exception { createIndex ( tasks , node , stateMgr , count , nodeData , processed ) ; return null ; } } ; if ( ! tasks . offer ( task ) ) { // All threads have tasks to do so we do it ourself createIndex ( tasks , node , stateMgr , count , nodeData , processed ) ; } }
public class xen_health_monitor_voltage { /** * Use this API to fetch filtered set of xen _ health _ monitor _ voltage resources . * filter string should be in JSON format . eg : " vm _ state : DOWN , name : [ a - z ] + " */ public static xen_health_monitor_voltage [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
xen_health_monitor_voltage obj = new xen_health_monitor_voltage ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; xen_health_monitor_voltage [ ] response = ( xen_health_monitor_voltage [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class JetlangEventDispatcher { /** * Creates a batch subscription to the jetlang memory channel for the ANY * event handler . This method does not require synchronization since we are * using CopyOnWriteArrayList * @ param eventHandler */ protected void addANYHandler ( final EventHandler eventHandler ) { } }
final int eventType = eventHandler . getEventType ( ) ; if ( eventType != Events . ANY ) { LOG . error ( "The incoming handler {} is not of type ANY" , eventHandler ) ; throw new IllegalArgumentException ( "The incoming handler is not of type ANY" ) ; } anyHandler . add ( eventHandler ) ; Callback < List < Event > > eventCallback = createEventCallbackForHandler ( eventHandler ) ; BatchSubscriber < Event > batchEventSubscriber = new BatchSubscriber < Event > ( fiber , eventCallback , 0 , TimeUnit . MILLISECONDS ) ; Disposable disposable = eventQueue . subscribe ( batchEventSubscriber ) ; disposableHandlerMap . put ( eventHandler , disposable ) ;
public class RestAuthenticationEntryPoint {

    /**
     * Invoked when a caller tries to access a secured REST resource without
     * supplying any credentials. A REST API has no login page to redirect to, so
     * we simply reply with 401 Unauthorized.
     *
     * {@inheritDoc}
     */
    @Override
    public void commence(final HttpServletRequest request, final HttpServletResponse response,
        final AuthenticationException authException) throws IOException {
        final int unauthorized = HttpServletResponse.SC_UNAUTHORIZED;
        response.sendError(unauthorized, authException.getMessage());
    }
}
public class AuthenticationFilter {

    /**
     * Handle BASIC authentication. Delegates this to the container by invoking
     * 'login' on the inbound http servlet request object.
     *
     * @param credentials the credentials
     * @param request the http servlet request
     * @param response the http servlet response
     * @param chain the filter chain
     * @throws IOException when I/O failure occurs in filter chain
     * @throws ServletException when servlet exception occurs during auth
     */
    protected void doBasicAuth(Creds credentials, HttpServletRequest request,
        HttpServletResponse response, FilterChain chain) throws IOException, ServletException {
        try {
            if (credentials.username.equals(request.getRemoteUser())) {
                // Already logged in as this user - do nothing. This can happen
                // in some app servers if the app server processes the BASIC auth
                // credentials before this filter gets a crack at them. WildFly 8
                // works this way, for example (despite the web.xml not specifying
                // any login config!).
            } else if (request.getRemoteUser() != null) {
                // switch user: end the container session of the previous user
                // before logging in with the new credentials
                request.logout();
                request.login(credentials.username, credentials.password);
            } else {
                // no user logged in yet - perform a fresh container login
                request.login(credentials.username, credentials.password);
            }
        } catch (Exception e) {
            // TODO log this error?
            // NOTE(review): printStackTrace bypasses the application's logging
            // framework - consider routing this through a proper logger.
            e.printStackTrace();
            // Login failed: challenge the client again and stop processing.
            sendAuthResponse(response);
            return;
        }
        // Authentication succeeded (or was already in place): continue the chain.
        doFilterChain(request, response, chain, null);
    }
}
public class AbstractGitFlowMojo { /** * Reads model from Maven project pom . xml . * @ param project * Maven project * @ return Maven model * @ throws MojoFailureException */ private Model readModel ( MavenProject project ) throws MojoFailureException { } }
try { // read pom . xml Model model ; FileReader fileReader = new FileReader ( project . getFile ( ) . getAbsoluteFile ( ) ) ; MavenXpp3Reader mavenReader = new MavenXpp3Reader ( ) ; try { model = mavenReader . read ( fileReader ) ; } finally { if ( fileReader != null ) { fileReader . close ( ) ; } } return model ; } catch ( Exception e ) { throw new MojoFailureException ( "" , e ) ; }
public class ManagedInstancesInner { /** * Updates a managed instance . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param managedInstanceName The name of the managed instance . * @ param parameters The requested managed instance resource state . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < ManagedInstanceInner > beginUpdateAsync ( String resourceGroupName , String managedInstanceName , ManagedInstanceUpdate parameters , final ServiceCallback < ManagedInstanceInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( beginUpdateWithServiceResponseAsync ( resourceGroupName , managedInstanceName , parameters ) , serviceCallback ) ;
public class LasUtils {

    /**
     * Smooths a set of las points through the IDW method.
     * <p>
     * Note that the values in the original data are changed in place.
     *
     * @param lasPoints the list of points to smooth.
     * @param useGround if <code>true</code>, the ground elev is smoothed instead of the z.
     * @param idwBuffer the buffer around the points to consider for smoothing.
     * @param pm the monitor.
     */
    @SuppressWarnings("unchecked")
    public static void smoothIDW(List<LasRecord> lasPoints, boolean useGround, double idwBuffer,
        IHMProgressMonitor pm) {
        // Collect the coordinate to smooth for every point: either the ground
        // elevation or the z value, depending on the caller's choice.
        List<Coordinate> coordinatesList = new ArrayList<Coordinate>();
        if (useGround) {
            for (LasRecord dot : lasPoints) {
                Coordinate c = new Coordinate(dot.x, dot.y, dot.groundElevation);
                coordinatesList.add(c);
            }
        } else {
            for (LasRecord dot : lasPoints) {
                Coordinate c = new Coordinate(dot.x, dot.y, dot.z);
                coordinatesList.add(c);
            }
        }
        // make triangles tree
        STRtree pointsTree = new STRtree(coordinatesList.size());
        pm.beginTask("Make points tree...", coordinatesList.size());
        for (Coordinate coord : coordinatesList) {
            pointsTree.insert(new Envelope(coord), coord);
            pm.worked(1);
        }
        pm.done();
        pm.beginTask("Interpolate...", coordinatesList.size());
        for (int i = 0; i < coordinatesList.size(); i++) {
            Coordinate coord = coordinatesList.get(i);
            // Query all points within the idwBuffer-expanded envelope and average
            // their elevations.
            Envelope env = new Envelope(coord);
            env.expandBy(idwBuffer);
            List<Coordinate> nearPoints = pointsTree.query(env);
            double avg = 0;
            for (Coordinate coordinate : nearPoints) {
                avg += coordinate.z;
            }
            // NOTE(review): the query presumably always returns at least the point
            // itself, so nearPoints should never be empty - confirm to rule out a
            // division by zero here.
            avg = avg / nearPoints.size();
            // Write the smoothed value back into the original record.
            LasRecord lasRecord = lasPoints.get(i);
            if (useGround) {
                lasRecord.groundElevation = avg;
            } else {
                lasRecord.z = avg;
            }
            pm.worked(1);
        }
        pm.done();
    }
}
public class CmsXmlContent { /** * Returns all simple type values below a given path . < p > * @ param elementPath the element path * @ param locale the content locale * @ return the simple type values */ public List < I_CmsXmlContentValue > getSimpleValuesBelowPath ( String elementPath , Locale locale ) { } }
List < I_CmsXmlContentValue > result = new ArrayList < I_CmsXmlContentValue > ( ) ; for ( I_CmsXmlContentValue value : getValuesByPath ( elementPath , locale ) ) { if ( value . isSimpleType ( ) ) { result . add ( value ) ; } else { result . addAll ( getAllSimpleSubValues ( value ) ) ; } } return result ;
public class ClusterMetricsContext {

    /**
     * Initializes the metrics plugin and starts the periodic metric events.
     * A plugin initialization failure is considered fatal: it is logged and the
     * process exits with status -1.
     */
    public void init() {
        try {
            initPlugin();
        } catch (RuntimeException e) {
            LOG.error("init metrics plugin error:", e);
            System.exit(-1);
        }
        // Schedule the event pipeline; the order mirrors the metric life cycle:
        // refresh -> flush -> merge -> upload -> diagnosis.
        pushRefreshEvent();
        pushFlushEvent();
        pushMergeEvent();
        pushUploadEvent();
        pushDiagnosisEvent();
        LOG.info("Finish");
    }
}
public class Line { /** * Intersect this line with another * @ param other * The other line we should intersect with * @ param limit * True if the collision is limited to the extent of the lines * @ param result * The resulting intersection point if any * @ return True if the lines intersect */ public boolean intersect ( Line other , boolean limit , Vector2f result ) { } }
float dx1 = end . getX ( ) - start . getX ( ) ; float dx2 = other . end . getX ( ) - other . start . getX ( ) ; float dy1 = end . getY ( ) - start . getY ( ) ; float dy2 = other . end . getY ( ) - other . start . getY ( ) ; float denom = ( dy2 * dx1 ) - ( dx2 * dy1 ) ; if ( denom == 0 ) { return false ; } float ua = ( dx2 * ( start . getY ( ) - other . start . getY ( ) ) ) - ( dy2 * ( start . getX ( ) - other . start . getX ( ) ) ) ; ua /= denom ; float ub = ( dx1 * ( start . getY ( ) - other . start . getY ( ) ) ) - ( dy1 * ( start . getX ( ) - other . start . getX ( ) ) ) ; ub /= denom ; if ( ( limit ) && ( ( ua < 0 ) || ( ua > 1 ) || ( ub < 0 ) || ( ub > 1 ) ) ) { return false ; } float u = ua ; float ix = start . getX ( ) + ( u * ( end . getX ( ) - start . getX ( ) ) ) ; float iy = start . getY ( ) + ( u * ( end . getY ( ) - start . getY ( ) ) ) ; result . set ( ix , iy ) ; return true ;
public class NameSpace { /** * Gets the method . * @ param name the name * @ param sig the sig * @ return the method * @ throws UtilEvalError the util eval error * @ see # getMethod ( String , Class [ ] , boolean ) * @ see # getMethod ( String , Class [ ] ) */ public BshMethod getMethod ( final String name , final Class < ? > [ ] sig ) throws UtilEvalError { } }
return this . getMethod ( name , sig , false /* declaredOnly */ ) ;
public class StringUtils {

    /**
     * <p>Gets a substring from the specified String avoiding exceptions.</p>
     *
     * <p>A negative start or end position counts back {@code n} characters from
     * the end of the String. The returned substring starts with the character at
     * the {@code start} position and ends before the {@code end} position; all
     * counting is zero-based. If {@code start} is not strictly to the left of
     * {@code end}, "" is returned.</p>
     *
     * <pre>
     * StringUtils.substring(null, *, *)    = null
     * StringUtils.substring("", *, *)      = "";
     * StringUtils.substring("abc", 0, 2)   = "ab"
     * StringUtils.substring("abc", 2, 0)   = ""
     * StringUtils.substring("abc", 2, 4)   = "c"
     * StringUtils.substring("abc", 4, 6)   = ""
     * StringUtils.substring("abc", 2, 2)   = ""
     * StringUtils.substring("abc", -2, -1) = "b"
     * StringUtils.substring("abc", -4, 2)  = "ab"
     * </pre>
     *
     * @param str the String to get the substring from, may be null
     * @param start the position to start from, negative means count back from the
     *     end of the String by this many characters
     * @param end the position to end at (exclusive), negative means count back
     *     from the end of the String by this many characters
     * @return substring from start position to end position, {@code null} if null
     *     String input
     */
    public static String substring(final String str, int start, int end) {
        if (str == null) {
            return null;
        }
        final int length = str.length();
        // Negative positions count back from the end of the string.
        if (end < 0) {
            end += length;
        }
        if (start < 0) {
            start += length;
        }
        // Clamp the end bound to the string length.
        if (end > length) {
            end = length;
        }
        // An empty range yields the empty string.
        if (start > end) {
            return "";
        }
        // Anything still negative is clamped to the start of the string.
        if (start < 0) {
            start = 0;
        }
        if (end < 0) {
            end = 0;
        }
        return str.substring(start, end);
    }
}
public class ErrorDetectingWrapper {

    /**
     * Extracts the next numeric id from the matcher. The matcher is expected to
     * find a token of the form "ID &lt;number&gt;" inside the error message.
     *
     * @param matcher matcher positioned over the error message
     * @param msg the original error message, used for diagnostics
     * @return the parsed id
     * @throws IllegalStateException if the expected "ID ..." token is absent
     */
    private long extractNextId(Matcher matcher, String msg) {
        if (!matcher.find()) {
            throw new IllegalStateException(
                "Facebook changed the error msg for page migration to something unfamiliar. The new msg is: " + msg);
        }
        // Strip the "ID " prefix from the matched token and parse the remainder.
        final String prefix = "ID ";
        return Long.parseLong(matcher.group().substring(prefix.length()));
    }
}
public class SuroControl {

    /**
     * Writes a line-based response to the client and flushes it.
     *
     * @param out the channel used to write back the response
     * @param response the response text; a newline terminator is appended
     * @throws IOException if writing fails
     */
    private void respond(BufferedWriter out, String response) throws IOException {
        out.append(response).append("\n");
        out.flush();
    }
}
public class Channels { /** * Launches a new JVM with the given classpath and system properties , establish a communication channel , * and return a { @ link Channel } to it . * @ param displayName * Human readable name of what this JVM represents . For example " Selenium grid " or " Hadoop " . * This token is used for messages to { @ code listener } . * @ param listener * The progress of the launcher and the failure information will be sent here . Must not be null . * @ param workDir * If non - null , the new JVM will have this directory as the working directory . This must be a local path . * @ param classpath * The classpath of the new JVM . Can be null if you just need { @ code agent . jar } ( and everything else * can be sent over the channel . ) But if you have jars that are known to be necessary by the new JVM , * setting it here will improve the classloading performance ( by avoiding remote class file transfer . ) * Classes in this classpath will also take precedence over any other classes that ' s sent via the channel * later , so it ' s also useful for making sure you get the version of the classes you want . * @ param systemProperties * If the new JVM should have a certain system properties set . Can be null . * @ return * never null * @ since 1.300 */ public static Channel newJVM ( String displayName , TaskListener listener , FilePath workDir , ClasspathBuilder classpath , Map < String , String > systemProperties ) throws IOException { } }
JVMBuilder vmb = new JVMBuilder ( ) ; vmb . systemProperties ( systemProperties ) ; return newJVM ( displayName , listener , vmb , workDir , classpath ) ;
public class SDVariable { /** * Add a control dependency for this variable on the specified variable . < br > * Control depnedencies can be used to enforce the execution order . * For example , if a control dependency X - > Y exists , then Y will only be executed after X is executed - even * if Y wouldn ' t normally depend on the result / values of X . * @ param controlDependency Control dependency to add for this variable */ public void addControlDependency ( SDVariable controlDependency ) { } }
String cdN = controlDependency . getVarName ( ) ; String n = this . getVarName ( ) ; Variable v = sameDiff . getVariables ( ) . get ( n ) ; if ( v . getControlDeps ( ) == null ) v . setControlDeps ( new ArrayList < String > ( ) ) ; if ( ! v . getControlDeps ( ) . contains ( cdN ) ) v . getControlDeps ( ) . add ( cdN ) ; Variable v2 = sameDiff . getVariables ( ) . get ( cdN ) ; if ( v2 . getControlDepsForVar ( ) == null ) v2 . setControlDepsForVar ( new ArrayList < String > ( ) ) ; if ( ! v2 . getControlDepsForVar ( ) . contains ( n ) ) v2 . getControlDepsForVar ( ) . add ( n ) ;
public class VictimsSqlDB { /** * Internal method implementing search for vulnerabilities checking if the * given { @ link VictimsRecord } ' s contents are a superset of a record in the * victims database . * @ param vr * @ return * @ throws SQLException */ protected HashSet < String > getEmbeddedVulnerabilities ( VictimsRecord vr ) throws SQLException { } }
HashSet < String > cves = new HashSet < String > ( ) ; Set < String > hashes = vr . getHashes ( Algorithms . SHA512 ) . keySet ( ) ; if ( hashes . size ( ) <= 0 ) { return cves ; } for ( Integer id : getEmbeddedRecords ( hashes ) ) { cves . addAll ( getVulnerabilities ( id ) ) ; } return cves ;
public class WTable { /** * Indicates whether the table supports sorting . * @ return true if the table and model both support sorting , false otherwise . */ public boolean isSortable ( ) { } }
// First check global override which turns sorting off if ( getSortMode ( ) == SortMode . NONE ) { return false ; } // Otherwise , the table is sortable if at least one column is sortable . TableModel dataModel = getTableModel ( ) ; final int columnCount = getColumnCount ( ) ; for ( int i = 0 ; i < columnCount ; i ++ ) { if ( dataModel . isSortable ( i ) ) { return true ; } } return false ;
public class WebDavServiceImpl {

    /**
     * WebDAV ACL method according to protocol extension - Access Control Protocol:
     * RFC3744. More details here: <a href='http://www.webdav.org/specs/rfc3744.html'>
     * Web Distributed Authoring and Versioning (WebDAV) Access Control Protocol</a>
     *
     * @param repoName repository name
     * @param repoPath path in repository
     * @param lockTokenHeader Lock-Token HTTP header
     * @param ifHeader If-HTTP Header
     * @param body Request body
     * @return the instance of javax.ws.rs.core.Response
     * @LevelAPI Provisional
     */
    @ACL
    @Path("/{repoName}/{repoPath:.*}/")
    public Response acl(@PathParam("repoName") String repoName,
        @PathParam("repoPath") String repoPath,
        @HeaderParam(ExtHttpHeaders.LOCKTOKEN) String lockTokenHeader,
        @HeaderParam(ExtHttpHeaders.IF) String ifHeader,
        HierarchicalProperty body) {
        if (log.isDebugEnabled()) {
            log.debug("ACL " + repoName + "/" + repoPath);
        }
        repoPath = normalizePath(repoPath);
        try {
            // Collect lock tokens from both headers, open a session on the target
            // workspace and delegate the ACL update to the command object.
            List<String> lockTokens = lockTokens(lockTokenHeader, ifHeader);
            Session session = session(repoName, workspaceName(repoPath), lockTokens);
            return new AclCommand().acl(session, path(repoPath), body);
        } catch (NoSuchWorkspaceException exc) {
            // Unknown workspace maps to 404.
            log.error("NoSuchWorkspace. " + exc.getMessage());
            return Response.status(HTTPStatus.NOT_FOUND).entity(exc.getMessage()).build();
        } catch (Exception exc) {
            // Everything else is reported as an internal server error.
            log.error(exc.getMessage(), exc);
            return Response.status(HTTPStatus.INTERNAL_ERROR).entity(exc.getMessage()).build();
        }
    }
}
public class TensorShapeProto {

    /**
     * <pre>
     * Dimensions of the tensor, such as {"input", 30}, {"output", 40}
     * for a 30 x 40 2D tensor. If an entry has size -1, this
     * corresponds to a dimension of unknown size. The names are
     * optional.
     * The order of entries in "dim" matters: It indicates the layout of the
     * values in the tensor in-memory representation.
     * The first entry in "dim" is the outermost dimension used to layout the
     * values, the last entry is the innermost dimension. This matches the
     * in-memory layout of RowMajor Eigen tensors.
     * If "dim.size()" &gt; 0, "unknown_rank" must be false.
     * </pre>
     *
     * <code>repeated .tensorflow.TensorShapeProto.Dim dim = 2;</code>
     *
     * @param index index of the dimension to return
     * @return the dimension at the given index
     */
    public org.tensorflow.framework.TensorShapeProto.Dim getDim(int index) {
        // Backed by the internal repeated-field list; an invalid index raises
        // IndexOutOfBoundsException, per the protobuf-generated accessor contract.
        return dim_.get(index);
    }
}
public class MtasDataLongOperations { /** * ( non - Javadoc ) * @ see * mtas . codec . util . DataCollector . MtasDataOperations # min11 ( java . lang . Number , * java . lang . Number ) */ @ Override public Long min11 ( Long arg1 , Long arg2 ) { } }
if ( arg1 == null || arg2 == null ) { return null ; } else { return Math . min ( arg1 , arg2 ) ; }
public class Spin {

    /**
     * Creates a spin wrapper for a data input. The data format of the input is
     * assumed to be JSON.
     *
     * @param input the input to wrap
     * @return the spin wrapper for the input
     * @throws IllegalArgumentException in case an argument of illegal type is provided (such as 'null')
     */
    public static SpinJsonNode JSON(Object input) {
        // Delegate to the factory with the JSON data format preselected.
        return SpinFactory.INSTANCE.createSpin(input, DataFormats.json());
    }
}
public class VirtualNetworkGatewaysInner { /** * Gets all the connections in a virtual network gateway . * @ param resourceGroupName The name of the resource group . * @ param virtualNetworkGatewayName The name of the virtual network gateway . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the PagedList & lt ; VirtualNetworkGatewayConnectionListEntityInner & gt ; object if successful . */ public PagedList < VirtualNetworkGatewayConnectionListEntityInner > listConnections ( final String resourceGroupName , final String virtualNetworkGatewayName ) { } }
ServiceResponse < Page < VirtualNetworkGatewayConnectionListEntityInner > > response = listConnectionsSinglePageAsync ( resourceGroupName , virtualNetworkGatewayName ) . toBlocking ( ) . single ( ) ; return new PagedList < VirtualNetworkGatewayConnectionListEntityInner > ( response . body ( ) ) { @ Override public Page < VirtualNetworkGatewayConnectionListEntityInner > nextPage ( String nextPageLink ) { return listConnectionsNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) . body ( ) ; } } ;
public class FieldPredicates {

    /**
     * Creates a predicate that matches fields whose declared type is exactly the
     * given class (identity comparison, no subtype matching).
     *
     * @param type the type a field must have to match
     * @return predicate testing a field's declared type
     */
    public static Predicate<Field> ofType(Class<?> type) {
        return candidate -> candidate.getType().equals(type);
    }
}
public class DefaultCacheManagerService { /** * Reports an { @ link Exception } as an error to the { @ link Reportable } . * The error output logged will use the < tt > location < / tt > and the * exception ' s message if it is not null . * @ param location { @ link String } , the resource location behind the * exception * @ param e { @ link Exception } , the exception to output message from */ private void logExceptionSimple ( String location , Exception e ) { } }
StringBuilder bldr = new StringBuilder ( ) ; bldr . append ( "ERROR UPDATING CACHE" ) ; if ( location != null ) { bldr . append ( " for " ) ; bldr . append ( location ) ; } bldr . append ( "\n\treason: " ) ; final String msg = e . getMessage ( ) ; if ( msg != null ) bldr . append ( msg ) ; else bldr . append ( "Unknown" ) ; bldr . append ( "\n" ) ; reportable . error ( bldr . toString ( ) ) ;
public class SpiderService { /** * Return the store name ( ColumnFamily ) in which terms are stored for the given table . * This name is the object store name appended with " _ terms " . If the object store name * is too long , it is truncated so that the terms store name is less than * { @ link # MAX _ CF _ NAME _ LENGTH } . * @ param tableDef { @ link TableDefinition } of a table . * @ return Store name ( ColumnFamily ) in which terms are stored for the given * table . */ public static String termsStoreName ( TableDefinition tableDef ) { } }
String objStoreName = Utils . truncateTo ( objectsStoreName ( tableDef ) , MAX_CF_NAME_LENGTH - "_Terms" . length ( ) ) ; return objStoreName + "_Terms" ;
public class BDDFactory { /** * Returns all the variables that a given BDD depends on . * @ param bdd the BDD * @ return all the variables that the BDD depends on */ public SortedSet < Variable > support ( final BDD bdd ) { } }
final int supportBDD = this . kernel . support ( bdd . index ( ) ) ; final Assignment assignment = createAssignment ( supportBDD ) ; assert assignment == null || assignment . negativeLiterals ( ) . isEmpty ( ) ; return assignment == null ? new TreeSet < Variable > ( ) : new TreeSet < > ( assignment . positiveLiterals ( ) ) ;
public class BeansToExcelOnTemplate {
    /**
     * Writes one spreadsheet row per JavaBean, using the template cell's row as
     * the layout for every generated row.
     *
     * @param templateCell the template cell; its row supplies the start position,
     *                     row height, and (via applyTemplateCellStyle) cell styles
     * @param items        the JavaBeans to write, one bean per row
     */
    @SuppressWarnings("unchecked")
    private void writeRows(Cell templateCell, List<Object> items) {
        val tmplRow = templateCell.getRow();
        val fromRow = tmplRow.getRowNum();
        val tmplCol = templateCell.getColumnIndex();
        for (int i = 0, ii = items.size(); i < ii; ++i) {
            val item = items.get(i);
            // The first bean reuses the template row itself; subsequent beans get
            // freshly created rows that copy the template row's height.
            val row = i == 0 ? tmplRow : sheet.createRow(fromRow + i);
            if (row != tmplRow) {
                row.setHeight(tmplRow.getHeight());
            }
            val fields = item.getClass().getDeclaredFields();
            // Count of skipped fields so far; subtracted from the column index so
            // written cells stay contiguous.
            int cutoff = 0;
            for (int j = 0; j < fields.length; ++j) {
                val field = fields[j];
                // Fields annotated @ExcelColIgnore are excluded from output.
                if (Fields.shouldIgnored(field, ExcelColIgnore.class)) {
                    ++cutoff;
                    continue;
                }
                // Fields named "*Tmpl" are also skipped — presumably template
                // holders rather than data; confirm against the bean convention.
                if (field.getName().endsWith("Tmpl")) {
                    ++cutoff;
                    continue;
                }
                val fv = Fields.invokeField(field, item);
                val excelCell = field.getAnnotation(ExcelCell.class);
                // maxLineLen of 0 means "no limit" per the newCell contract —
                // NOTE(review): confirm against newCell's implementation.
                int maxLen = excelCell == null ? 0 : excelCell.maxLineLen();
                val cell = newCell(tmplRow, tmplCol + j - cutoff, i, row, fv, maxLen);
                applyTemplateCellStyle(field, item, excelCell, cell);
            }
            // Clear any cells remaining past the last written column.
            emptyEndsCells(tmplRow, tmplCol, i, row, fields.length);
        }
    }
}
public class StreamRuleResource { /** * TODO Remove after all consumers have been updated */ @ POST @ Path ( "/{streamRuleId}" ) @ Timed @ Consumes ( MediaType . APPLICATION_JSON ) @ Produces ( MediaType . APPLICATION_JSON ) @ AuditEvent ( type = AuditEventTypes . STREAM_RULE_UPDATE ) @ Deprecated public SingleStreamRuleSummaryResponse updateDeprecated ( @ PathParam ( "streamid" ) String streamid , @ PathParam ( "streamRuleId" ) String streamRuleId , @ Valid @ NotNull CreateStreamRuleRequest cr ) throws NotFoundException , ValidationException { } }
checkNotDefaultStream ( streamid , "Cannot remove stream rule from default stream." ) ; return update ( streamid , streamRuleId , cr ) ;
public class JmfTr { /** * / * ( non - Javadoc ) * @ see com . ibm . ws . sib . utils . ras . SibTr # warning ( com . ibm . websphere . ras . TraceComponent , java . lang . String ) */ public static void warning ( TraceComponent tc , String msgKey ) { } }
if ( isTracing ( ) ) SibTr . warning ( tc , msgKey ) ;
public class Config { /** * Test if configuration object has an attribute with requested name and value . * @ param name name of the attribute to search for , * @ param value attribute value . * @ return true if configuration object has an attribute with requested name and value . * @ throws IllegalArgumentException if < code > name < / code > argument is null or empty . * @ throws IllegalArgumentException if < code > value < / code > argument is null or empty . */ public boolean hasAttribute ( String name , String value ) { } }
Params . notNullOrEmpty ( name , "Attribute name" ) ; Params . notNullOrEmpty ( value , "Attribute value" ) ; return value . equals ( attributes . get ( name ) ) ;
public class ConcurrentHashMap { /** * Builds a set of properties containing the current statistics . * @ return java . util . Map the statistics . */ protected java . util . Map captureStatistics ( ) { } }
if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , "captureStatistics" ) ; java . util . Map statistics = new java . util . HashMap ( ) ; String histogram = " " ; for ( int n = 0 ; n < subMapAccessFrequency . length ; n ++ ) { histogram += subMapAccessFrequency [ n ] + " " ; subMapAccessFrequency [ n ] = 0 ; } statistics . put ( "subMapAccessFrequency" , histogram ) ; if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , "captureStatistics" , new Object [ ] { statistics } ) ; return statistics ;
public class XMLShredder {
    /**
     * Add a new element node.
     *
     * @param paramLeftSiblingKeyStack
     *            stack used to determine if the new element has to be inserted
     *            as a right sibling or as a new child (in the latter case
     *            NULL_NODE is on top of the stack)
     * @param paramEvent
     *            the current event from the StAX parser
     * @return the modified stack
     * @throws TTException
     *             if adding {@link ElementNode} fails
     */
    protected final Stack<Long> addNewElement(final Stack<Long> paramLeftSiblingKeyStack, final StartElement paramEvent) throws TTException {
        assert paramLeftSiblingKeyStack != null && paramEvent != null;
        long key;
        final QName name = paramEvent.getName();
        if (mFirstChildAppend == EShredderInsert.ADDASRIGHTSIBLING) {
            // A subtree cannot be appended as a sibling of the document root.
            checkState(mWtx.getNode().getKind() != IConstants.ROOT,
                "Subtree can not be inserted as sibling of Root");
            key = mWtx.insertElementAsRightSibling(name);
            // Only the very first element of the subtree is a sibling insert;
            // revert to first-child mode for everything that follows.
            mFirstChildAppend = EShredderInsert.ADDASFIRSTCHILD;
        } else {
            // NULL_NODE on top means "no left sibling yet" -> insert as first child.
            if (paramLeftSiblingKeyStack.peek() == NULL_NODE) {
                key = mWtx.insertElementAsFirstChild(name);
            } else {
                key = mWtx.insertElementAsRightSibling(name);
            }
        }
        // Replace the top entry with the new element's key, then push NULL_NODE
        // as the (not yet existing) left sibling for this element's children.
        paramLeftSiblingKeyStack.pop();
        paramLeftSiblingKeyStack.push(key);
        paramLeftSiblingKeyStack.push(NULL_NODE);
        // Parse namespaces.
        for (final Iterator<?> it = paramEvent.getNamespaces(); it.hasNext();) {
            final Namespace namespace = (Namespace) it.next();
            mWtx.insertNamespace(new QName(namespace.getNamespaceURI(), "", namespace.getPrefix()));
            // Reposition the cursor on the element after each insert.
            mWtx.moveTo(key);
        }
        // Parse attributes.
        for (final Iterator<?> it = paramEvent.getAttributes(); it.hasNext();) {
            final Attribute attribute = (Attribute) it.next();
            mWtx.insertAttribute(attribute.getName(), attribute.getValue());
            mWtx.moveTo(key);
        }
        return paramLeftSiblingKeyStack;
    }
}
public class UserAgentFileParser { /** * Parses a csv stream of rules . * @ param input The input stream * @ param fields The fields that should be stored during parsing * @ return a UserAgentParser based on the read rules * @ throws IOException If reading the stream failed . * @ throws ParseException */ public static UserAgentParser parse ( final Reader input , final Collection < BrowsCapField > fields ) throws IOException , ParseException { } }
return new UserAgentFileParser ( fields ) . parse ( input ) ;
public class AttributeCollector { /** * Method called to ensure hash area will be properly set up in * cases where initially no room was needed , but default attribute ( s ) * is being added . */ private void initHashArea ( ) { } }
/* Let ' s use small hash area of size 4 , and one spill ; don ' t * want too big ( need to clear up room ) , nor too small ( only * collisions ) */ mAttrHashSize = mAttrSpillEnd = 4 ; if ( mAttrMap == null || mAttrMap . length < mAttrHashSize ) { mAttrMap = new int [ mAttrHashSize + 1 ] ; } mAttrMap [ 0 ] = mAttrMap [ 1 ] = mAttrMap [ 2 ] = mAttrMap [ 3 ] = 0 ; allocBuffers ( ) ;
public class CloudSchedulerClient { /** * Gets a job . * < p > Sample code : * < pre > < code > * try ( CloudSchedulerClient cloudSchedulerClient = CloudSchedulerClient . create ( ) ) { * JobName name = JobName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ JOB ] " ) ; * Job response = cloudSchedulerClient . getJob ( name . toString ( ) ) ; * < / code > < / pre > * @ param name Required . * < p > The job name . For example : ` projects / PROJECT _ ID / locations / LOCATION _ ID / jobs / JOB _ ID ` . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final Job getJob ( String name ) { } }
GetJobRequest request = GetJobRequest . newBuilder ( ) . setName ( name ) . build ( ) ; return getJob ( request ) ;
public class JarClassLoader { /** * 加载Jar文件到指定loader中 * @ param loader { @ link URLClassLoader } * @ param jarFile 被加载的jar * @ throws UtilException IO异常包装和执行异常 */ public static void loadJar ( URLClassLoader loader , File jarFile ) throws UtilException { } }
try { final Method method = ClassUtil . getDeclaredMethod ( URLClassLoader . class , "addURL" , URL . class ) ; if ( null != method ) { method . setAccessible ( true ) ; final List < File > jars = loopJar ( jarFile ) ; for ( File jar : jars ) { ReflectUtil . invoke ( loader , method , new Object [ ] { jar . toURI ( ) . toURL ( ) } ) ; } } } catch ( IOException e ) { throw new UtilException ( e ) ; }
public class ModelFactory { /** * Constructor - method to use when service descriptors are required ( e . g . schema and wsdl for services ) * @ param groupId * @ param artifactId * @ param version * @ param service * @ param deploymentModel * @ param serviceDescriptor * @ param operations * @ return the new model instance */ private static IModel doCreateNewModel ( String groupId , String artifactId , String version , String service , MuleVersionEnum muleVersion , DeploymentModelEnum deploymentModel , List < TransportEnum > transports , TransportEnum inboundTransport , TransportEnum outboundTransport , TransformerEnum transformerType , String serviceDescriptor , List < String > operations ) { } }
try { DefaultModelImpl m = ( DefaultModelImpl ) modelClass . newInstance ( ) ; m . initModel ( groupId , artifactId , version , service , muleVersion , deploymentModel , transports , inboundTransport , outboundTransport , transformerType , serviceDescriptor , operations ) ; return m ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; }
public class DefaultWhenFileSystem {
    /**
     * Deletes the file represented by the specified {@code path}, asynchronously.
     * If the path represents a directory and {@code recursive = true} then the
     * directory and its contents will be deleted recursively.
     *
     * @param path path to the file
     * @param recursive delete recursively?
     * @return a promise for completion
     */
    @Override
    public Promise<Void> deleteRecursive(String path, boolean recursive) {
        // Bridge Vert.x's callback-style deleteRecursive to a Promise via the adapter.
        return adapter.toPromise(handler -> vertx.fileSystem().deleteRecursive(path, recursive, handler));
    }
}
public class GVRRenderData { /** * Get a Rendering { @ link GVRRenderPass Pass } for this Mesh * @ param passIndex The index of the RenderPass to get . */ public GVRRenderPass getPass ( int passIndex ) { } }
if ( passIndex < mRenderPassList . size ( ) ) { return mRenderPassList . get ( passIndex ) ; } else { Log . e ( TAG , "Trying to get invalid pass. Pass " + passIndex + " was not created." ) ; return null ; }
public class Codec { /** * Create a new encoder with specified codec id . * @ param codecId the codec id . * @ return a new encoder . * @ throws JavaAVException if encoder could not be created . */ public static Codec getEncoderById ( CodecID codecId ) throws JavaAVException { } }
if ( codecId == null ) throw new NullPointerException ( "CodecID is null." ) ; AVCodec avCodec = avcodec_find_encoder ( codecId . value ( ) ) ; if ( avCodec == null || avCodec . isNull ( ) ) throw new JavaAVException ( "Encoder not found: " + codecId . toString ( ) ) ; Codec codec = new Codec ( ) ; codec . avCodec = avCodec ; return codec ;
public class TinyPlugzGuice { /** * { @ inheritDoc } * This implementation post - processes each listener before returning it to * inject its fields using this instance ' s injector . */ @ Override protected final Iterator < DeployListener > findDeployListeners ( ClassLoader pluginClassLoader ) { } }
final Iterator < DeployListener > listeners = this . serviceLoader . loadService ( DeployListener . class , pluginClassLoader ) ; return new Iterator < DeployListener > ( ) { @ Override public boolean hasNext ( ) { return listeners . hasNext ( ) ; } @ Override public DeployListener next ( ) { final DeployListener listener = listeners . next ( ) ; TinyPlugzGuice . this . injector . injectMembers ( listener ) ; return listener ; } } ;
public class ServiceDetail { /** * The DNS names for the service . * @ param baseEndpointDnsNames * The DNS names for the service . */ public void setBaseEndpointDnsNames ( java . util . Collection < String > baseEndpointDnsNames ) { } }
if ( baseEndpointDnsNames == null ) { this . baseEndpointDnsNames = null ; return ; } this . baseEndpointDnsNames = new com . amazonaws . internal . SdkInternalList < String > ( baseEndpointDnsNames ) ;
public class CachingGenerator { /** * if ( < cache > ! = null ) return < cache > ; */ private void generateCacheHitReturn ( ) { } }
// stack : generateGetFromCache ( ) ; // stack : < cachedValue > insert ( new InsnNode ( DUP ) ) ; // stack : < cachedValue > : : < cachedValue > LabelNode cacheMissLabel = new LabelNode ( ) ; insert ( new JumpInsnNode ( IFNULL , cacheMissLabel ) ) ; // stack : < cachedValue > insert ( new InsnNode ( ARETURN ) ) ; // stack : < null > insert ( cacheMissLabel ) ; // stack : < null > insert ( new InsnNode ( POP ) ) ; // stack :
public class StructureGenerator { /** * Parse the command line arguments and initialize the data */ private int init ( String [ ] args ) { } }
try { for ( int i = 0 ; i < args . length ; i ++ ) { // parse command line if ( args [ i ] . equals ( "-maxDepth" ) ) { maxDepth = Integer . parseInt ( args [ ++ i ] ) ; if ( maxDepth < 1 ) { System . err . println ( "maxDepth must be positive: " + maxDepth ) ; return - 1 ; } } else if ( args [ i ] . equals ( "-minWidth" ) ) { minWidth = Integer . parseInt ( args [ ++ i ] ) ; if ( minWidth < 0 ) { System . err . println ( "minWidth must be positive: " + minWidth ) ; return - 1 ; } } else if ( args [ i ] . equals ( "-maxWidth" ) ) { maxWidth = Integer . parseInt ( args [ ++ i ] ) ; } else if ( args [ i ] . equals ( "-numOfFiles" ) ) { numOfFiles = Integer . parseInt ( args [ ++ i ] ) ; if ( numOfFiles < 1 ) { System . err . println ( "NumOfFiles must be positive: " + numOfFiles ) ; return - 1 ; } } else if ( args [ i ] . equals ( "-avgFileSize" ) ) { avgFileSize = Double . parseDouble ( args [ ++ i ] ) ; if ( avgFileSize <= 0 ) { System . err . println ( "AvgFileSize must be positive: " + avgFileSize ) ; return - 1 ; } } else if ( args [ i ] . equals ( "-outDir" ) ) { outDir = new File ( args [ ++ i ] ) ; } else if ( args [ i ] . equals ( "-seed" ) ) { r = new Random ( Long . parseLong ( args [ ++ i ] ) ) ; } else { System . err . println ( USAGE ) ; ToolRunner . printGenericCommandUsage ( System . err ) ; return - 1 ; } } } catch ( NumberFormatException e ) { System . err . println ( "Illegal parameter: " + e . getLocalizedMessage ( ) ) ; System . err . println ( USAGE ) ; return - 1 ; } if ( maxWidth < minWidth ) { System . err . println ( "maxWidth must be bigger than minWidth: " + maxWidth ) ; return - 1 ; } if ( r == null ) { r = new Random ( ) ; } return 0 ;
public class CassandraDataHandlerBase {
    /**
     * Populates a collection (List/Set) or Map field of the entity from the raw
     * Thrift column bytes.
     *
     * @param entity
     *            the entity
     * @param thriftColumnValue
     *            the thrift column value (expected to be a {@code byte[]})
     * @param attribute
     *            the attribute (metamodel) describing the target field
     * @throws PersistenceException
     *             wrapping any failure while decomposing or setting the value
     */
    private void setCollectionValue(Object entity, Object thriftColumnValue, Attribute attribute) {
        try {
            ByteBuffer valueByteBuffer = ByteBuffer.wrap((byte[]) thriftColumnValue);
            if (Collection.class.isAssignableFrom(((Field) attribute.getJavaMember()).getType())) {
                // Element type of the List/Set, needed to decompose the bytes.
                Class<?> genericClass = PropertyAccessorHelper.getGenericClass((Field) attribute.getJavaMember());
                PropertyAccessorHelper.set(entity, (Field) attribute.getJavaMember(),
                        CassandraDataTranslator.decompose(((Field) attribute.getJavaMember()).getType(),
                                valueByteBuffer, genericClass, true));
            } else if (((Field) attribute.getJavaMember()).getType().isAssignableFrom(Map.class)) {
                // Key and value types of the Map, in declaration order.
                List<Class<?>> mapGenericClasses = PropertyAccessorHelper.getGenericClasses((Field) attribute.getJavaMember());
                PropertyAccessorHelper.set(entity, (Field) attribute.getJavaMember(),
                        CassandraDataTranslator.decompose(((Field) attribute.getJavaMember()).getType(),
                                valueByteBuffer, mapGenericClasses, true));
            }
            // NOTE(review): fields that are neither Collection nor Map fall
            // through silently — confirm that is intentional for this handler.
        } catch (Exception e) {
            log.error("Error while setting field{} value via CQL, Caused by: .", attribute.getName(), e);
            throw new PersistenceException(e);
        }
    }
}
public class CategoryGraph {
    /**
     * Computes the paths from each category node to the root.
     * Computing n paths will take some time. Thus, efficient computing is based
     * on the assumption that all subpaths in the shortest path to the root are
     * also shortest paths for the corresponding nodes. Starting with the leaf
     * nodes gives the longest initial paths with the most subpaths.
     * The result is cached to disk and reloaded on subsequent calls.
     *
     * @throws WikiApiException Thrown if errors occurred.
     */
    public void createRootPathMap() throws WikiApiException {
        // Only create rootPathMap if it was not already computed.
        if (rootPathMap != null) {
            return;
        }
        File rootPathFile = new File(wiki.getWikipediaId() + "_" + this.rootPathMapFilename);
        // Try to load rootPathMap from a precomputed file.
        if (rootPathFile.exists()) {
            logger.info("Loading saved rootPathMap ...");
            rootPathMap = deserializeMap(rootPathFile);
            logger.info("Done loading saved rootPathMap");
            return;
        }
        logger.info("Computing rootPathMap");
        rootPathMap = new HashMap<Integer, List<Integer>>();
        // A queue holding the nodes to process.
        List<Integer> queue = new ArrayList<Integer>();
        // Initialize the queue with all leaf nodes (longest paths first, so
        // their subpaths cover as many nodes as possible).
        Set<Integer> leafNodes = this.__getLeafNodes();
        queue.addAll(leafNodes);
        logger.info(queue.size() + " leaf nodes.");
        fillRootPathMap(queue);
        queue.clear(); // queue should be empty now, but clear anyway
        // Add non-leaf nodes that have not been on any shortest path yet.
        for (Category cat : wiki.getCategories()) {
            if (!rootPathMap.containsKey(cat.getPageId())) {
                queue.add(cat.getPageId());
            }
        }
        logger.info(queue.size() + " non leaf nodes not on a shortest leaf-node to root path.");
        fillRootPathMap(queue);
        // Sanity check: report any category that still has no root path.
        for (Category cat : wiki.getCategories()) {
            if (!rootPathMap.containsKey(cat.getPageId())) {
                logger.info("no path for " + cat.getPageId());
            }
        }
        // From the root path map, the graph depth follows directly.
        this.depth = getDepthFromRootPathMap();
        logger.info("Setting depth of category graph: " + this.depth);
        // Persist the map so future runs can skip the computation.
        logger.info("Serializing rootPathMap");
        this.serializeMap(rootPathMap, rootPathFile);
    }
}
public class BuildServerDataProvider { /** * Get the { @ link BuildServerDataProvider } implementation for the running environment * @ param env environment variables which get used to identify the environment * @ param log logging provider which will be used to log events * @ return the corresponding { @ link BuildServerDataProvider } for your environment or { @ link UnknownBuildServerData } */ public static BuildServerDataProvider getBuildServerProvider ( @ Nonnull Map < String , String > env , @ Nonnull LoggerBridge log ) { } }
if ( BambooBuildServerData . isActiveServer ( env ) ) { return new BambooBuildServerData ( log , env ) ; } if ( GitlabBuildServerData . isActiveServer ( env ) ) { return new GitlabBuildServerData ( log , env ) ; } if ( HudsonJenkinsBuildServerData . isActiveServer ( env ) ) { return new HudsonJenkinsBuildServerData ( log , env ) ; } if ( TeamCityBuildServerData . isActiveServer ( env ) ) { return new TeamCityBuildServerData ( log , env ) ; } if ( TravisBuildServerData . isActiveServer ( env ) ) { return new TravisBuildServerData ( log , env ) ; } return new UnknownBuildServerData ( log , env ) ;