signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class CtClassUtil { /** * 获取方法的参数名称 . * @ param clazz * @ param method * @ return */ public static String [ ] getParameterNames ( Method method ) { } }
// AssertUtil . assertNotNull ( method , " 参数method不能为空 . " ) ; Class < ? > clazz = method . getDeclaringClass ( ) ; return getParameterNames ( clazz , method ) ;
public class Types { /** * Are corresponding elements of ts subtypes of ss ? If lists are * of different length , return false . */ public boolean isSubtypes ( List < Type > ts , List < Type > ss ) { } }
while ( ts . tail != null && ss . tail != null /* inlined : ts . nonEmpty ( ) & & ss . nonEmpty ( ) */ && isSubtype ( ts . head , ss . head ) ) { ts = ts . tail ; ss = ss . tail ; } return ts . tail == null && ss . tail == null ; /* inlined : ts . isEmpty ( ) & & ss . isEmpty ( ) ; */
public class KeyUtil { /** * Returns the key size of the given key object in bits . * @ param key the key object , cannot be null * @ return the key size of the given key object in bits , or - 1 if the * key size is not accessible */ public static final int getKeySize ( Key key ) { } }
int size = - 1 ; if ( key instanceof Length ) { try { Length ruler = ( Length ) key ; size = ruler . length ( ) ; } catch ( UnsupportedOperationException usoe ) { // ignore the exception } if ( size >= 0 ) { return size ; } } // try to parse the length from key specification if ( key instanceof SecretKey ) { SecretKey sk = ( SecretKey ) key ; String format = sk . getFormat ( ) ; if ( "RAW" . equals ( format ) && sk . getEncoded ( ) != null ) { size = ( sk . getEncoded ( ) . length * 8 ) ; } // Otherwise , it may be a unextractable key of PKCS # 11 , or // a key we are not able to handle . } else if ( key instanceof RSAKey ) { RSAKey pubk = ( RSAKey ) key ; size = pubk . getModulus ( ) . bitLength ( ) ; } else if ( key instanceof ECKey ) { ECKey pubk = ( ECKey ) key ; ECParameterSpec params = pubk . getParams ( ) ; // According to RFC 3279 section 2.3.5 , EC keys are allowed // to inherit parameters in an X . 509 certificate issuer ' s // key parameters , so the parameters may be null . The parent // key will be rejected if its parameters don ' t pass , so this // is okay . if ( params != null ) { size = params . getOrder ( ) . bitLength ( ) ; } } else if ( key instanceof DSAKey ) { DSAKey pubk = ( DSAKey ) key ; DSAParams params = pubk . getParams ( ) ; // According to RFC 3279 section 2.3.2 , DSA keys are allowed // to inherit parameters in an X . 509 certificate issuer ' s // key parameters , so the parameters may be null . The parent // key will be rejected if its parameters don ' t pass , so this // is okay . if ( params != null ) { size = params . getP ( ) . bitLength ( ) ; } } else if ( key instanceof DHKey ) { DHKey pubk = ( DHKey ) key ; size = pubk . getParams ( ) . getP ( ) . bitLength ( ) ; } // Otherwise , it may be a unextractable key of PKCS # 11 , or // a key we are not able to handle . return size ;
public class Config { /** * Returns the map event journal config for the given name , creating one * if necessary and adding it to the collection of known configurations . * The configuration is found by matching the configuration name * pattern to the provided { @ code name } without the partition qualifier * ( the part of the name after { @ code ' @ ' } ) . * If no configuration matches , it will create one by cloning the * { @ code " default " } configuration and add it to the configuration * collection . * If there is no default config as well , it will create one and disable * the event journal by default . * This method is intended to easily and fluently create and add * configurations more specific than the default configuration without * explicitly adding it by invoking * { @ link # addEventJournalConfig ( EventJournalConfig ) } . * Because it adds new configurations if they are not already present , * this method is intended to be used before this config is used to * create a hazelcast instance . Afterwards , newly added configurations * may be ignored . * @ param name name of the map event journal config * @ return the map event journal configuration * @ throws ConfigurationException if ambiguous configurations are found * @ see StringPartitioningStrategy # getBaseName ( java . lang . String ) * @ see # setConfigPatternMatcher ( ConfigPatternMatcher ) * @ see # getConfigPatternMatcher ( ) */ public EventJournalConfig getMapEventJournalConfig ( String name ) { } }
return ConfigUtils . getConfig ( configPatternMatcher , mapEventJournalConfigs , name , EventJournalConfig . class , new BiConsumer < EventJournalConfig , String > ( ) { @ Override public void accept ( EventJournalConfig eventJournalConfig , String name ) { eventJournalConfig . setMapName ( name ) ; if ( "default" . equals ( name ) ) { eventJournalConfig . setEnabled ( false ) ; } } } ) ;
public class CommandLineLinker { /** * Performs a link using a command line linker */ public void link ( final CCTask task , final File outputFile , final String [ ] sourceFiles , final CommandLineLinkerConfiguration config ) throws BuildException { } }
final File parentDir = new File ( outputFile . getParent ( ) ) ; String parentPath ; try { parentPath = parentDir . getCanonicalPath ( ) ; } catch ( final IOException ex ) { parentPath = parentDir . getAbsolutePath ( ) ; } String [ ] execArgs = prepareArguments ( task , parentPath , outputFile . getName ( ) , sourceFiles , config ) ; int commandLength = 0 ; for ( final String execArg : execArgs ) { commandLength += execArg . length ( ) + 1 ; } // if command length exceeds maximum // then create a temporary // file containing everything but the command name if ( commandLength >= this . getMaximumCommandLength ( ) ) { try { execArgs = prepareResponseFile ( outputFile , execArgs ) ; } catch ( final IOException ex ) { throw new BuildException ( ex ) ; } } final int retval = runCommand ( task , parentDir , execArgs ) ; // if the process returned a failure code then // throw an BuildException if ( retval != 0 ) { // construct the exception throw new BuildException ( getCommandWithPath ( config ) + " failed with return code " + retval , task . getLocation ( ) ) ; }
public class ConnectionFactoryDefinitionProcessor { /** * ( non - Javadoc ) * @ see com . ibm . wsspi . injectionengine . InjectionProcessor # createInjectionBinding ( java . lang . annotation . Annotation , java . lang . Class , java . lang . reflect . Member , java . lang . String ) */ @ Override public InjectionBinding < ConnectionFactoryDefinition > createInjectionBinding ( ConnectionFactoryDefinition annotation , Class < ? > instanceClass , Member member , String jndiName ) throws InjectionException { } }
InjectionBinding < ConnectionFactoryDefinition > injectionBinding = new ConnectionFactoryDefinitionInjectionBinding ( jndiName , ivNameSpaceConfig ) ; injectionBinding . merge ( annotation , instanceClass , null ) ; return injectionBinding ;
public class SofaRegistry { /** * 反注册服务信息 * @ param appName 应用 * @ param serviceName 服务关键字 * @ param group 服务分组 */ protected void doUnRegister ( String appName , String serviceName , String group ) { } }
SofaRegistryClient . getRegistryClient ( appName , registryConfig ) . unregister ( serviceName , group , RegistryType . PUBLISHER ) ;
public class CadmiumCli { /** * Sets up the ssh configuration that git will use to communicate with the remote git repositories . * @ param noPrompt True if there should be authentication prompts for the users username and password . * If false , the program will fail with an exit code of 1 if not authorized . */ private static void setupSsh ( boolean noPrompt ) { } }
File sshDir = new File ( System . getProperty ( "user.home" ) , ".ssh" ) ; if ( sshDir . exists ( ) ) { GitService . setupLocalSsh ( sshDir . getAbsolutePath ( ) , noPrompt ) ; }
public class XmlParser { /** * Parse an element , with its tags . * < pre > * [ 39 ] element : : = EmptyElementTag | STag content ETag * [ 40 ] STag : : = ' & lt ; ' Name ( S Attribute ) * S ? ' & gt ; ' * [ 44 ] EmptyElementTag : : = ' & lt ; ' Name ( S Attribute ) * S ? ' / & gt ; ' * < / pre > * ( The ' & lt ; ' has already been read . ) * NOTE : this method actually chains onto parseContent ( ) , if necessary , and * parseContent ( ) will take care of calling parseETag ( ) . */ private void parseElement ( boolean maybeGetSubset ) throws Exception { } }
String gi ; char c ; int oldElementContent = currentElementContent ; String oldElement = currentElement ; ElementDecl element ; // This is the ( global ) counter for the // array of specified attributes . tagAttributePos = 0 ; // Read the element type name . gi = readNmtoken ( true ) ; // If we saw no DTD , and this is the document root element , // let the application modify the input stream by providing one . if ( maybeGetSubset ) { InputSource subset = handler . getExternalSubset ( gi , handler . getSystemId ( ) ) ; if ( subset != null ) { String publicId = subset . getPublicId ( ) ; String systemId = subset . getSystemId ( ) ; handler . warn ( "modifying document by adding DTD" ) ; handler . doctypeDecl ( gi , publicId , systemId ) ; pushString ( null , ">" ) ; // NOTE : [ dtd ] is so we say what SAX2 expects , // though it ' s misleading ( subset , not entire dtd ) pushURL ( true , "[dtd]" , new ExternalIdentifiers ( publicId , systemId , null ) , subset . getCharacterStream ( ) , subset . getByteStream ( ) , subset . getEncoding ( ) , false ) ; // Loop until we end up back at ' > ' while ( true ) { doReport = expandPE = true ; skipWhitespace ( ) ; doReport = expandPE = false ; if ( tryRead ( '>' ) ) { break ; } else { expandPE = true ; parseMarkupdecl ( ) ; expandPE = false ; } } // the " > " string isn ' t popped yet if ( inputStack . size ( ) != 1 ) { fatal ( "external subset has unmatched '>'" ) ; } handler . endDoctype ( ) ; } } // Determine the current content type . currentElement = gi ; element = elementInfo . get ( gi ) ; currentElementContent = getContentType ( element , CONTENT_ANY ) ; // Read the attributes , if any . // After this loop , " c " is the closing delimiter . boolean white = tryWhitespace ( ) ; c = readCh ( ) ; while ( ( c != '/' ) && ( c != '>' ) ) { unread ( c ) ; if ( ! 
white ) { fatal ( "need whitespace between attributes" ) ; } parseAttribute ( gi ) ; white = tryWhitespace ( ) ; c = readCh ( ) ; } // Supply any defaulted attributes . Iterator < String > atts = declaredAttributes ( element ) ; if ( atts != null ) { String aname ; loop : while ( atts . hasNext ( ) ) { aname = atts . next ( ) ; // See if it was specified . for ( int i = 0 ; i < tagAttributePos ; i ++ ) { if ( tagAttributes [ i ] == aname ) { continue loop ; } } // . . . or has a default String value = getAttributeDefaultValue ( gi , aname ) ; if ( value == null ) { continue ; } handler . attribute ( aname , value , false ) ; } } // Figure out if this is a start tag // or an empty element , and dispatch an // event accordingly . switch ( c ) { case '>' : handler . startElement ( gi ) ; parseContent ( ) ; break ; case '/' : require ( '>' ) ; handler . startElement ( gi ) ; handler . endElement ( gi ) ; break ; } // Restore the previous state . currentElement = oldElement ; currentElementContent = oldElementContent ;
public class BaseMessageTransport { /** * Get the external message container for this Internal message . * Typically , the overriding class supplies a default format for the transport type . * < br / > NOTE : The message header from the internal message is copies , but not the message itself . * @ param The internalTrxMessage that I will convert to this external format . * @ return The ( empty ) External message . */ public String getMessageClassName ( TrxMessageHeader trxMessageHeader ) { } }
String strMessageClass = this . getProperty ( trxMessageHeader , TrxMessageHeader . EXTERNAL_MESSAGE_CLASS ) ; String strPackage = ( String ) trxMessageHeader . get ( TrxMessageHeader . BASE_PACKAGE ) ; strMessageClass = ClassServiceUtility . getFullClassName ( strPackage , strMessageClass ) ; return strMessageClass ;
public class AbstractProducerBean { /** * Initializes the bean and its metadata */ @ Override public void internalInitialize ( BeanDeployerEnvironment environment ) { } }
getDeclaringBean ( ) . initialize ( environment ) ; super . internalInitialize ( environment ) ; initPassivationCapable ( ) ;
public class PDBServerPanel { private JPanel getPDBFilePanel ( int pos , JTextField f , JTextField c ) { } }
JPanel panel = new JPanel ( ) ; panel . setBorder ( BorderFactory . createLineBorder ( Color . black ) ) ; JLabel l01 = new JLabel ( "PDB code " ) ; panel . add ( l01 ) ; Box hBox11 = Box . createHorizontalBox ( ) ; JLabel l11 = new JLabel ( pos + ":" ) ; f . setMaximumSize ( new Dimension ( Short . MAX_VALUE , 30 ) ) ; hBox11 . add ( l11 ) ; hBox11 . add ( Box . createVerticalGlue ( ) ) ; hBox11 . add ( f , BorderLayout . CENTER ) ; hBox11 . add ( Box . createVerticalGlue ( ) ) ; panel . add ( hBox11 ) ; Box hBox21 = Box . createHorizontalBox ( ) ; JLabel l21 = new JLabel ( "Chain" + pos + ":" ) ; c . setMaximumSize ( new Dimension ( Short . MAX_VALUE , 30 ) ) ; hBox21 . add ( l21 ) ; hBox21 . add ( Box . createGlue ( ) ) ; hBox21 . add ( c , BorderLayout . CENTER ) ; hBox21 . add ( Box . createGlue ( ) ) ; panel . add ( hBox21 ) ; return panel ;
public class RabbitMqUtils { /** * Configures the connection factory with the right settings . * @ param factory the connection factory * @ param configuration the messaging configuration * @ throws IOException if something went wrong * @ see RabbitMqConstants */ public static void configureFactory ( ConnectionFactory factory , Map < String , String > configuration ) throws IOException { } }
final Logger logger = Logger . getLogger ( RabbitMqUtils . class . getName ( ) ) ; logger . fine ( "Configuring a connection factory for RabbitMQ." ) ; String messageServerIp = configuration . get ( RABBITMQ_SERVER_IP ) ; if ( messageServerIp != null ) { Map . Entry < String , Integer > entry = Utils . findUrlAndPort ( messageServerIp ) ; factory . setHost ( entry . getKey ( ) ) ; if ( entry . getValue ( ) > 0 ) factory . setPort ( entry . getValue ( ) ) ; } factory . setUsername ( configuration . get ( RABBITMQ_SERVER_USERNAME ) ) ; factory . setPassword ( configuration . get ( RABBITMQ_SERVER_PASSWORD ) ) ; // Timeout for connection establishment : 5s factory . setConnectionTimeout ( 5000 ) ; // Configure automatic reconnection factory . setAutomaticRecoveryEnabled ( true ) ; // Recovery interval : 10s factory . setNetworkRecoveryInterval ( 10000 ) ; // Exchanges and so on should be redeclared if necessary factory . setTopologyRecoveryEnabled ( true ) ; // SSL if ( Boolean . parseBoolean ( configuration . get ( RABBITMQ_USE_SSL ) ) ) { logger . fine ( "Connection factory for RabbitMQ: SSL is used." ) ; InputStream clientIS = null ; InputStream storeIS = null ; try { clientIS = new FileInputStream ( configuration . get ( RABBITMQ_SSL_KEY_STORE_PATH ) ) ; storeIS = new FileInputStream ( configuration . get ( RABBITMQ_SSL_TRUST_STORE_PATH ) ) ; char [ ] keyStorePassphrase = configuration . get ( RABBITMQ_SSL_KEY_STORE_PASSPHRASE ) . toCharArray ( ) ; KeyStore ks = KeyStore . getInstance ( getValue ( configuration , RABBITMQ_SSL_KEY_STORE_TYPE , DEFAULT_SSL_KEY_STORE_TYPE ) ) ; ks . load ( clientIS , keyStorePassphrase ) ; String value = getValue ( configuration , RABBITMQ_SSL_KEY_MNGR_FACTORY , DEFAULT_SSL_MNGR_FACTORY ) ; KeyManagerFactory kmf = KeyManagerFactory . getInstance ( value ) ; kmf . init ( ks , keyStorePassphrase ) ; char [ ] trustStorePassphrase = configuration . get ( RABBITMQ_SSL_TRUST_STORE_PASSPHRASE ) . toCharArray ( ) ; KeyStore tks = KeyStore . 
getInstance ( getValue ( configuration , RABBITMQ_SSL_TRUST_STORE_TYPE , DEFAULT_SSL_TRUST_STORE_TYPE ) ) ; tks . load ( storeIS , trustStorePassphrase ) ; value = getValue ( configuration , RABBITMQ_SSL_TRUST_MNGR_FACTORY , DEFAULT_SSL_MNGR_FACTORY ) ; TrustManagerFactory tmf = TrustManagerFactory . getInstance ( value ) ; tmf . init ( tks ) ; SSLContext c = SSLContext . getInstance ( getValue ( configuration , RABBITMQ_SSL_PROTOCOL , DEFAULT_SSL_PROTOCOL ) ) ; c . init ( kmf . getKeyManagers ( ) , tmf . getTrustManagers ( ) , null ) ; factory . useSslProtocol ( c ) ; } catch ( GeneralSecurityException e ) { throw new IOException ( "SSL configuration for the RabbitMQ factory failed." , e ) ; } finally { Utils . closeQuietly ( storeIS ) ; Utils . closeQuietly ( clientIS ) ; } }
public class RunnableCallableFuture { /** * Start a new daemon thread to call the run ( ) method asynchronously , returning this object as a Future ( and not a * RunnableCallableFuture ) * @ return */ public Future < T > asyncRun ( ) { } }
final Thread t = new Thread ( this ) ; { t . setName ( "AsyncRun for " + this ) ; t . setDaemon ( true ) ; t . start ( ) ; } return this ;
public class Redwood { /** * Hide multiple channels . All other channels will be unaffected . * @ param channels The channels to hide */ public static void hideChannels ( Object ... channels ) { } }
// TODO this could share more code with the other show / hide ( Only ) Channels methods for ( LogRecordHandler handler : handlers ) { if ( handler instanceof VisibilityHandler ) { VisibilityHandler visHandler = ( VisibilityHandler ) handler ; for ( Object channel : channels ) { visHandler . alsoHide ( channel ) ; } } }
public class WebSocket { /** * Find a per - message compression extension from among the agreed extensions . */ private PerMessageCompressionExtension findAgreedPerMessageCompressionExtension ( ) { } }
if ( mAgreedExtensions == null ) { return null ; } for ( WebSocketExtension extension : mAgreedExtensions ) { if ( extension instanceof PerMessageCompressionExtension ) { return ( PerMessageCompressionExtension ) extension ; } } return null ;
public class SimpleNotificationDelegate { /** * Shows a notification for the given push message * @ param extras the message extras */ public void showNotification ( Context context , Bundle extras ) { } }
// parse the title and message from the extras String title = getTitle ( context , extras ) ; String message = getMessage ( context , extras ) ; // if the message is empty , bail if ( message == null ) return ; // create the notification NotificationManager mNotificationManager = ( NotificationManager ) context . getSystemService ( Context . NOTIFICATION_SERVICE ) ; long when = System . currentTimeMillis ( ) ; NotificationCompat . Builder mBuilder = new NotificationCompat . Builder ( context ) . setAutoCancel ( true ) . setSmallIcon ( getIcon ( context ) ) . setContentTitle ( title ) . setContentText ( message ) . setContentIntent ( getContentIntent ( context , extras , when ) ) . setStyle ( new NotificationCompat . BigTextStyle ( ) . bigText ( message ) ) ; // display the notification mNotificationManager . notify ( ( int ) when , mBuilder . build ( ) ) ;
public class Kafka08PartitionDiscoverer { /** * Turn a broker instance into a node instance . * @ param broker broker instance * @ return Node representing the given broker */ private static Node brokerToNode ( Broker broker ) { } }
return new Node ( broker . id ( ) , broker . host ( ) , broker . port ( ) ) ;
public class DiscriminationProcessImpl { /** * @ see * com . ibm . wsspi . channelfw . DiscriminationProcess # discriminate ( com . ibm . wsspi * . channelfw . VirtualConnection , com . ibm . wsspi . channelfw . ConnectionLink , * java . lang . String ) */ @ Override public int discriminate ( VirtualConnection vc , ConnectionLink currentChannel , String inputChannelName ) { } }
Channel channel = null ; String channelName = null ; String matchString = ( inputChannelName + ChannelDataImpl . CHILD_STRING ) ; int result = FAILURE ; // Iterate the channels of the current list . for ( int i = 0 ; channelList != null && i < channelList . length ; i ++ ) { channel = channelList [ i ] ; // Find a channel that starts with the name passed in . // Note : Runtime channels are children channel data objects with names // like name _ CFINTERNAL _ CHILD _ 0 // This is kept hidden from users . channelName = channel . getName ( ) ; if ( channelName != null && channelName . startsWith ( matchString ) ) { // Found the channel . Connect the links . ConnectionLink link = channel . getConnectionLink ( vc ) ; currentChannel . setApplicationCallback ( link ) ; link . setDeviceLink ( currentChannel ) ; result = SUCCESS ; break ; } } return result ;
public class TimedInterface { /** * Creates a new TimedInterface for a given interface < code > ctype < / code > with a concrete class * < code > concrete < / code > and a specific id . The id can be used to distinguish among multiple * objects with the same concrete class . */ @ SuppressWarnings ( "unchecked" ) public static < T > T newProxy ( Class < T > ctype , T concrete , String id ) { } }
final InvocationHandler handler = new TimedHandler < > ( ctype , concrete , id ) ; final Class < ? > [ ] types = new Class [ ] { ctype , CompositeMonitor . class } ; return ( T ) Proxy . newProxyInstance ( ctype . getClassLoader ( ) , types , handler ) ;
public class Instance { /** * Use { @ link # getRedisConfigsMap ( ) } instead . */ @ java . lang . Deprecated public java . util . Map < java . lang . String , java . lang . String > getRedisConfigs ( ) { } }
return getRedisConfigsMap ( ) ;
public class JSPrimitive { /** * Set the typeCode for the type . Doing this automatically sets all the other type * information implied by the type . */ public void setTypeCode ( int typeCode ) { } }
if ( typeCode <= 0 || typeCode >= JSBaseTypes . baseTypes . length ) throw new IllegalArgumentException ( "TypeCode=" + typeCode ) ; this . typeCode = typeCode ; baseType = JSBaseTypes . baseTypes [ typeCode ] ;
public class BagOfTokens { /** * Weight associated with a token : by default , the number of times * the token appears in the bag . */ double getWeight ( Token tok ) { } }
Double f = ( Double ) weightMap . get ( tok ) ; return f == null ? 0 : f . doubleValue ( ) ;
public class FieldListener { /** * Set the mode this listener responds / doesn ' t respond to . * @ param iMoveMode The type of move being done ( init / read / screen ) . * @ param flag True If this listener should respond to this kind of move . */ public void setRespondsToMode ( int iMoveMode , boolean flag ) { } }
if ( iMoveMode == DBConstants . SCREEN_MOVE ) m_bScreenMove = flag ; if ( iMoveMode == DBConstants . INIT_MOVE ) m_bInitMove = flag ; if ( iMoveMode == DBConstants . READ_MOVE ) m_bReadMove = flag ;
public class BDDFactory { /** * Sets the variable order for this factory manually . In this case the number of variables has not to be set manually . * @ param varOrder the variable order . */ public void setVariableOrder ( final Variable ... varOrder ) { } }
this . kernel . setNumberOfVars ( varOrder . length ) ; for ( final Variable lit : varOrder ) { final int idx = this . var2idx . size ( ) ; this . var2idx . put ( lit . variable ( ) , idx ) ; this . idx2var . put ( idx , lit . variable ( ) ) ; }
public class MockMethodAdvice { /** * Handle a method entry hook . * @ param instance instance that is mocked * @ param origin method that contains the hook * @ param arguments arguments to the method * @ return A callable that can be called to get the mocked result or null if the method is not * mocked . */ @ SuppressWarnings ( "unused" ) public Callable < ? > handle ( Object instance , Method origin , Object [ ] arguments ) throws Throwable { } }
InvocationHandlerAdapter interceptor = interceptors . get ( instance ) ; if ( interceptor == null ) { return null ; } return new ReturnValueWrapper ( interceptor . interceptEntryHook ( instance , origin , arguments , new SuperMethodCall ( selfCallInfo , origin , instance , arguments ) ) ) ;
public class SpringApplication { /** * Convenient alternative to { @ link # setDefaultProperties ( Map ) } . * @ param defaultProperties some { @ link Properties } */ public void setDefaultProperties ( Properties defaultProperties ) { } }
this . defaultProperties = new HashMap < > ( ) ; for ( Object key : Collections . list ( defaultProperties . propertyNames ( ) ) ) { this . defaultProperties . put ( ( String ) key , defaultProperties . get ( key ) ) ; }
public class AbstractSphere3F { /** * { @ inheritDoc } */ @ Pure @ Override public double distanceLinf ( Point3D p ) { } }
Point3D r = getClosestPointTo ( p ) ; return r . getDistanceLinf ( p ) ;
public class LabelingJobDataAttributes { /** * Declares that your content is free of personally identifiable information or adult content . Amazon SageMaker may * restrict the Amazon Mechanical Turk workers that can view your task based on this information . * @ param contentClassifiers * Declares that your content is free of personally identifiable information or adult content . Amazon * SageMaker may restrict the Amazon Mechanical Turk workers that can view your task based on this * information . * @ see ContentClassifier */ public void setContentClassifiers ( java . util . Collection < String > contentClassifiers ) { } }
if ( contentClassifiers == null ) { this . contentClassifiers = null ; return ; } this . contentClassifiers = new java . util . ArrayList < String > ( contentClassifiers ) ;
public class PooledByteArray { /** * This method marks this array to be released . * @ return { @ code true } if this array can be released , { @ code false } if there are references left that have to be * released before . */ public boolean release ( ) { } }
if ( this . released ) { // already ( marked as ) released . . . return false ; } this . released = true ; if ( this . childCount == 0 ) { if ( this . parent == null ) { return true ; } else { assert ( this . parent . childCount > 0 ) ; this . parent . childCount -- ; if ( ( this . parent . childCount == 0 ) && ( this . parent . released ) ) { return true ; } } } return false ;
public class ConcurrentMultiCache { /** * Returns the object identified by the specified key / value pair if it is currently * in memory in the cache . Just because this value returns < code > null < / code > does * not mean the object does not exist . Instead , it may be that it is simply not * cached in memory . * @ param key they unique identifier attribute on which you are searching * @ param val the value of the unique identifier on which you are searching * @ return the matching object from the cache */ public T find ( String key , Object val ) { } }
return find ( key , val , null ) ;
public class SplittableElementSet { /** * Make a new splittable set from a collection of VM . * We consider the collection does not have duplicated elements . * @ param c the collection to wrap * @ param idx the partition for each VM * @ return the resulting set */ public static SplittableElementSet < VM > newVMIndex ( Collection < VM > c , TIntIntHashMap idx ) { } }
return new SplittableElementSet < > ( c , idx ) ;
public class Call { /** * Convenience method to dials a call from a phone number to a phone number * @ param to the to number * @ param from the from number * @ param callbackUrl the callback URL * @ param tag the call tag * @ return the call * @ throws IOException unexpected error . */ public static Call create ( final String to , final String from , final String callbackUrl , final String tag ) throws Exception { } }
assert ( to != null && from != null ) ; final Map < String , Object > params = new HashMap < String , Object > ( ) ; params . put ( "to" , to ) ; params . put ( "from" , from ) ; params . put ( "callbackUrl" , callbackUrl ) ; params . put ( "tag" , tag ) ; final Call call = create ( params ) ; return call ;
public class UrlUtils { /** * Gets the absolute filesystem path to the class path root for the specified * resource . The root is either a JAR file or a directory with loose class * files . If the URL does not use a supported protocol , an exception will be * thrown . * @ param url * The URL to the resource , may be < code > null < / code > . * @ param resource * The name of the resource , must not be < code > null < / code > . * @ return The absolute filesystem path to the class path root of the resource * or < code > null < / code > if the input URL was < code > null < / code > . */ public static File getResourceRoot ( final URL url , final String resource ) { } }
String path = null ; if ( url != null ) { final String spec = url . toExternalForm ( ) ; if ( ( JAR_FILE ) . regionMatches ( true , 0 , spec , 0 , JAR_FILE . length ( ) ) ) { URL jar ; try { jar = new URL ( spec . substring ( JAR . length ( ) , spec . lastIndexOf ( "!/" ) ) ) ; } catch ( final MalformedURLException e ) { throw new IllegalArgumentException ( "Invalid JAR URL: " + url + ", " + e . getMessage ( ) ) ; } path = decodeUrl ( jar . getPath ( ) ) ; } else if ( FILE . regionMatches ( true , 0 , spec , 0 , FILE . length ( ) ) ) { path = decodeUrl ( url . getPath ( ) ) ; path = path . substring ( 0 , path . length ( ) - resource . length ( ) ) ; } else { throw new IllegalArgumentException ( "Invalid class path URL: " + url ) ; } } return path != null ? new File ( path ) : null ;
public class Request { /** * A private method that handles reading the Responses from the server . * @ return a { @ link Response } from the server . * @ throws IOException */ private Response readResponse ( ) throws IOException { } }
Response response = new Response ( ) ; response . setResponseCode ( connection . getResponseCode ( ) ) ; response . setResponseMessage ( connection . getResponseMessage ( ) ) ; response . setHeaders ( connection . getHeaderFields ( ) ) ; try { response . setBody ( getStringFromStream ( connection . getInputStream ( ) ) ) ; } catch ( IOException e ) { response . setBody ( getStringFromStream ( connection . getErrorStream ( ) ) ) ; } return response ;
public class AcmoUtil { /** * CSV Escape handling for given string . * @ param str The string will be escaped for CSV format output * @ return Escaped CSV string */ public static String escapeCsvStr ( String str ) { } }
if ( str != null && ! str . equals ( "" ) ) { boolean needQuote = false ; if ( str . contains ( "\"" ) ) { str = str . replaceAll ( "\"" , "\"\"" ) ; needQuote = true ; } if ( ! needQuote && str . contains ( "," ) ) { needQuote = true ; } if ( needQuote ) { str = "\"" + str + "\"" ; } return str ; } else { return "" ; }
public class CloneableBean { /** * Makes a deep bean clone of the object passed in the constructor . * To be used by classes using CloneableBean in a delegation pattern , * @ return a clone of the object bean . * @ throws CloneNotSupportedException thrown if the object bean could not be cloned . */ public static Object beanClone ( Object obj , Set < String > ignoreProperties ) throws CloneNotSupportedException { } }
final Class < ? > clazz = obj . getClass ( ) ; try { final Object clonedBean = clazz . newInstance ( ) ; final List < PropertyDescriptor > propertyDescriptors = BeanIntrospector . getPropertyDescriptorsWithGettersAndSetters ( clazz ) ; for ( final PropertyDescriptor propertyDescriptor : propertyDescriptors ) { final String propertyName = propertyDescriptor . getName ( ) ; final boolean ignoredProperty = ignoreProperties . contains ( propertyName ) ; if ( ! ignoredProperty ) { final Method getter = propertyDescriptor . getReadMethod ( ) ; final Method setter = propertyDescriptor . getWriteMethod ( ) ; Object value = getter . invoke ( obj , NO_PARAMS ) ; if ( value != null ) { value = doClone ( value ) ; setter . invoke ( clonedBean , new Object [ ] { value } ) ; } } } return clonedBean ; } catch ( final CloneNotSupportedException e ) { LOG . error ( "Error while cloning bean" , e ) ; throw e ; } catch ( final Exception e ) { LOG . error ( "Error while cloning bean" , e ) ; throw new CloneNotSupportedException ( "Cannot clone a " + clazz + " object" ) ; }
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertIfcCableCarrierSegmentTypeEnumToString ( EDataType eDataType , Object instanceValue ) { } }
// Generated EMF converter: renders the enum literal via toString(); a null
// instanceValue maps to null. eDataType is unused. Do not hand-edit (@generated).
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class SchemaManager { /** * Drops the index with the specified name . */ void dropConstraint ( Session session , HsqlName name , boolean cascade ) { } }
Table t = getTable ( session , name . parent . name , name . parent . schema . name ) ; TableWorks tw = new TableWorks ( session , t ) ; tw . dropConstraint ( name . name , cascade ) ;
public class AbstractSerializer { /** * * * * Collection * * * */ protected ISynchronizationPoint < ? extends Exception > serializeCollectionValue ( CollectionContext context , String path , List < SerializationRule > rules ) { } }
ISynchronizationPoint < ? extends Exception > start = startCollectionValue ( context , path , rules ) ; SynchronizationPoint < Exception > result = new SynchronizationPoint < > ( ) ; if ( start . isUnblocked ( ) ) { serializeCollectionElement ( context , context . getIterator ( ) , 0 , path , rules , result ) ; return result ; } start . listenAsyncSP ( new SerializationTask ( ( ) -> { serializeCollectionElement ( context , context . getIterator ( ) , 0 , path , rules , result ) ; } ) , result ) ; return result ;
public class Swift {

    /**
     * Create any parent folders if they do not exist, to meet old swift convention.
     * hubiC requires these objects for the sub-objects to be visible in webapp. As an
     * optimization, we consider that if folder a/b/c exists, then a/ and a/b/ also
     * exist so are not checked nor created.
     *
     * @param leafFolderPath deepest folder whose ancestors must exist
     * @throws CStorageException if a blob occupies a path where a folder is required
     */
    private void createIntermediateFoldersObjects(CPath leafFolderPath) throws CStorageException {
        // Walk from the leaf towards the root, collecting folders that do not
        // exist yet; the leaf folder is likely to exist already.
        List<CPath> missing = new LinkedList<CPath>();
        CPath current = leafFolderPath;
        while (!current.isRoot()) {
            CFile existing = getFile(current);
            if (existing != null) {
                if (existing.isBlob()) {
                    // Clash: a blob sits where a folder is required.
                    throw new CInvalidFileTypeException(existing.getPath(), false);
                }
                // Folder found: by convention all its ancestors exist too.
                break;
            }
            LOGGER.debug("Nothing exists at path: {}, will go up", current);
            missing.add(0, current);
            current = current.getParent();
        }
        // Create the missing folders, root-most first.
        if (!missing.isEmpty()) {
            LOGGER.debug("Inexisting parent_folders will be created: {}", missing);
            for (CPath folder : missing) {
                LOGGER.debug("Creating intermediate folder: {}", folder);
                rawCreateFolder(folder);
            }
        }
    }
}
public class BitFileFilter { /** * Initialize class fields . */ public void init ( Record record , BaseField fldRecordTarget , String fsTarget , BaseField fldTarget ) { } }
m_fldToCompare = fldTarget ; m_fsRecordTarget = fsTarget ; m_fldRecordTarget = fldRecordTarget ; super . init ( record ) ;
public class DraggableSwipeBack { /** * Called when a drawer animation has successfully completed . */ private void completeAnimation ( ) { } }
mScroller . abortAnimation ( ) ; final int finalX = mScroller . getFinalX ( ) ; setOffsetPixels ( finalX ) ; setDrawerState ( finalX == 0 ? STATE_CLOSED : STATE_OPEN ) ; stopLayerTranslation ( ) ;
public class RuleClassifier { /** * Best value of entropy */ public void mainFindBestValEntropy ( Node root ) { } }
// Seeds the best-split entropy search: computes the weighted entropy of the
// node's current left/right class-count split, stores it as the starting
// minimum (minEntropyTemp), then recurses via findBestValEntropy.
// No-op when root is null.
if ( root != null ) { DoubleVector parentClassCL = new DoubleVector ( ) ; DoubleVector classCountL = root . classCountsLeft ; // class count left
DoubleVector classCountR = root . classCountsRight ; // class count right (comment previously mislabeled "left")
double numInst = root . classCountsLeft . sumOfValues ( ) + root . classCountsRight . sumOfValues ( ) ; double classCountLSum = root . classCountsLeft . sumOfValues ( ) ; double classCountRSum = root . classCountsRight . sumOfValues ( ) ; double classCountLEntropy = entropy ( classCountL ) ; double classCountREntropy = entropy ( classCountR ) ;
// Weighted average entropy of the two branches = initial minimum to beat.
this . minEntropyTemp = ( classCountLSum / numInst ) * classCountLEntropy + ( classCountRSum / numInst ) * classCountREntropy ;
// Copy the left counts so the recursion gets its own parent distribution.
for ( int f = 0 ; f < root . classCountsLeft . numValues ( ) ; f ++ ) { parentClassCL . setValue ( f , root . classCountsLeft . getValue ( f ) ) ; } findBestValEntropy ( root , classCountL , classCountR , true , this . minEntropyTemp , parentClassCL ) ; }
public class KafkaStandaloneGenerator { /** * Entry point to the kafka data producer . */ public static void main ( String [ ] args ) throws Exception { } }
final KafkaCollector [ ] collectors = new KafkaCollector [ NUM_PARTITIONS ] ; // create the generator threads for ( int i = 0 ; i < collectors . length ; i ++ ) { collectors [ i ] = new KafkaCollector ( BROKER_ADDRESS , TOPIC , i ) ; } StandaloneThreadedGenerator . runGenerator ( collectors ) ;
public class AmazonIdentityManagementClient { /** * Retrieves information about the specified server certificate stored in IAM . * For more information about working with server certificates , see < a * href = " https : / / docs . aws . amazon . com / IAM / latest / UserGuide / id _ credentials _ server - certs . html " > Working with Server * Certificates < / a > in the < i > IAM User Guide < / i > . This topic includes a list of AWS services that can use the server * certificates that you manage with IAM . * @ param getServerCertificateRequest * @ return Result of the GetServerCertificate operation returned by the service . * @ throws NoSuchEntityException * The request was rejected because it referenced a resource entity that does not exist . The error message * describes the resource . * @ throws ServiceFailureException * The request processing has failed because of an unknown error , exception or failure . * @ sample AmazonIdentityManagement . GetServerCertificate * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / iam - 2010-05-08 / GetServerCertificate " target = " _ top " > AWS API * Documentation < / a > */ @ Override public GetServerCertificateResult getServerCertificate ( GetServerCertificateRequest request ) { } }
// Run the SDK pre-execution hooks on the request, then delegate to the
// generated execute method that performs the actual service call.
request = beforeClientExecution ( request ) ; return executeGetServerCertificate ( request ) ;
public class Morpha { /** * Capitalizes the first letter and lower - cases every consecutive letter . */ private static String capitalise ( String s ) { } }
if ( s . length ( ) == 0 ) { return s ; } StringBuilder s1 = new StringBuilder ( s ) ; if ( Character . isLowerCase ( s1 . charAt ( 0 ) ) ) { s1 . setCharAt ( 0 , Character . toUpperCase ( s1 . charAt ( 0 ) ) ) ; } for ( int j = 1 ; j < s1 . length ( ) ; j ++ ) { if ( Character . isUpperCase ( s1 . charAt ( j ) ) ) { s1 . setCharAt ( j , Character . toLowerCase ( s1 . charAt ( j ) ) ) ; } } return s1 . toString ( ) ;
public class SARLValidator { /** * Check the modifiers for the SARL agents . * @ param agent the agent . */ @ Check protected void checkModifiers ( SarlAgent agent ) { } }
// Delegate to the agent-specific modifier validator; the formatted message
// (Messages.SARLValidator_9 with the agent name) is used for error reporting.
this . agentModifierValidator . checkModifiers ( agent , MessageFormat . format ( Messages . SARLValidator_9 , agent . getName ( ) ) ) ;
public class ComputationGraph { /** * Generate the output for all examples / batches in the input iterator , and concatenate them into a single array . * Can only be used with ComputationGraphs with 1 output * @ param iterator Data to pass through the network * @ return output for all examples in the iterator */ public INDArray outputSingle ( DataSetIterator iterator ) { } }
Preconditions . checkArgument ( numOutputArrays == 1 , "Cannot use this method with nets that have more" + " than 1 output array. This network has %s outputs" , numOutputArrays ) ; return output ( iterator ) [ 0 ] ;
public class TreeNode { /** * A helper method for the range search used in the interval intersection query in the tree . * This corresponds to the right branch of the range search , once we find a node , whose * midpoint is contained in the query interval . All intervals in the right subtree of that node * are guaranteed to intersect with the query , if they have an endpoint smaller or equal than * the end of the query interval . Basically , this means that every time we branch to the right * in the binary search , we need to add the whole left subtree to the result set . * @ param node The right child of the node , whose midpoint is contained in the query interval . * @ param query The query interval . * @ param result The set which stores all intervals in the tree , intersecting the query . */ static < T extends Comparable < ? super T > > void rangeQueryRight ( TreeNode < T > node , Interval < T > query , Set < Interval < T > > result ) { } }
// Iterative descent. When the node's midpoint lies inside the query, every
// interval stored at the node intersects the query, and the entire left
// subtree is swept in (node.left iterated as an Iterable over its intervals
// -- assumes TreeNode implements Iterable; confirm against the class) before
// continuing right. Otherwise only intervals not strictly right of the query
// are added ('increasing' appears ordered so the scan can break early --
// TODO confirm ordering invariant), and the search moves left.
while ( node != null ) { if ( query . contains ( node . midpoint ) ) { result . addAll ( node . increasing ) ; if ( node . left != null ) { for ( Interval < T > next : node . left ) result . add ( next ) ; } node = node . right ; } else { for ( Interval < T > next : node . increasing ) { if ( next . isRightOf ( query ) ) break ; result . add ( next ) ; } node = node . left ; } }
public class IntArrays {

    /**
     * Counts the number of times value appears in array.
     *
     * @param array the array to scan
     * @param value the value to look for
     * @return the number of elements equal to {@code value}
     */
    public static int count(int[] array, int value) {
        int occurrences = 0;
        for (int element : array) {
            if (element == value) {
                occurrences++;
            }
        }
        return occurrences;
    }
}
public class HibernateLayerUtil { /** * Set session factory . * @ param sessionFactory session factory * @ throws HibernateLayerException could not get class metadata for data source */ public void setSessionFactory ( SessionFactory sessionFactory ) throws HibernateLayerException { } }
try { this . sessionFactory = sessionFactory ; if ( null != layerInfo ) { entityMetadata = sessionFactory . getClassMetadata ( layerInfo . getFeatureInfo ( ) . getDataSourceName ( ) ) ; } } catch ( Exception e ) { // NOSONAR throw new HibernateLayerException ( e , ExceptionCode . HIBERNATE_NO_SESSION_FACTORY ) ; }
public class Money { /** * Write members to a MwsWriter . * @ param w * The writer to write to . */ @ Override public void writeFragmentTo ( MwsWriter w ) { } }
// Serialize both members under their MWS element names.
w . write ( "CurrencyCode" , currencyCode ) ; w . write ( "Amount" , amount ) ;
public class PrettyTime { /** * Format the given { @ link Duration } object , using the { @ link TimeFormat } specified by the { @ link TimeUnit } contained * within . If the given { @ link Duration } is < code > null < / code > , the current value of * { @ link System # currentTimeMillis ( ) } will be used instead . * @ param duration the { @ link Duration } to be formatted * @ return A formatted string representing { @ code duration } */ public String format ( final Duration duration ) { } }
if ( duration == null ) return format ( now ( ) ) ; TimeFormat format = getFormat ( duration . getUnit ( ) ) ; String time = format . format ( duration ) ; return format . decorate ( duration , time ) ;
public class HtmlDocletWriter { /** * Add the annotatation types for the given doc and parameter . * @ param indent the number of spaces to indent the parameters . * @ param doc the doc to write annotations for . * @ param param the parameter to write annotations for . * @ param tree the content tree to which the annotation types will be added */ public boolean addAnnotationInfo ( int indent , Doc doc , Parameter param , Content tree ) { } }
// Delegate to the general overload with the parameter's annotation list;
// 'false' flags the non-lineBreak variant (confirm against the overload).
return addAnnotationInfo ( indent , doc , param . annotations ( ) , false , tree ) ;
public class SnowflakeAzureClient { /** * Re - creates the encapsulated storage client with a fresh access token * @ param stageCredentials a Map ( as returned by GS ) which contains the new credential properties * @ throws SnowflakeSQLException failure to renew the client */ @ Override public void renew ( Map stageCredentials ) throws SnowflakeSQLException { } }
// Store the new credentials on the stage info, then rebuild the Azure client
// so subsequent requests use the refreshed token.
stageInfo . setCredentials ( stageCredentials ) ; setupAzureClient ( stageInfo , encMat ) ;
public class SubscriptionUsagesInner { /** * Gets a subscription usage metric . * @ param locationName The name of the region where the resource is located . * @ param usageName Name of usage metric to return . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the SubscriptionUsageInner object if successful . */ public SubscriptionUsageInner get ( String locationName , String usageName ) { } }
// Synchronous facade: block on the async call and unwrap the response body.
return getWithServiceResponseAsync ( locationName , usageName ) . toBlocking ( ) . single ( ) . body ( ) ;
public class StoreConfig { /** * Gets a class property via a string property name . * @ param pName - the property name * @ param defaultValue - the default property value * @ return a { @ link Class } property */ public Class < ? > getClass ( String pName , Class < ? > defaultValue ) { } }
String pValue = _properties . getProperty ( pName ) ; return parseClass ( pName , pValue , defaultValue ) ;
public class LabsInner { /** * Delete lab . This operation can take a while to complete . * @ param resourceGroupName The name of the resource group . * @ param labAccountName The name of the lab Account . * @ param labName The name of the lab . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */ public void beginDelete ( String resourceGroupName , String labAccountName , String labName ) { } }
// Synchronous facade: block on the async begin-delete call; the body is
// discarded since the operation returns nothing.
beginDeleteWithServiceResponseAsync ( resourceGroupName , labAccountName , labName ) . toBlocking ( ) . single ( ) . body ( ) ;
public class CronetChannelBuilder { /** * Creates a new builder for the given server host , port and CronetEngine . */ public static CronetChannelBuilder forAddress ( String host , int port , CronetEngine cronetEngine ) { } }
// Static factory: validates the engine up front, then constructs the builder.
Preconditions . checkNotNull ( cronetEngine , "cronetEngine" ) ; return new CronetChannelBuilder ( host , port , cronetEngine ) ;
public class ArrayUtil { /** * Removes elements that are {@code null}, "" or blank (whitespace-only) strings. * @ param array the array * @ return the filtered array * @ since 3.2.2 */ public static < T extends CharSequence > T [ ] removeBlank ( T [ ] array ) { } }
// Keep only elements that are not blank, delegating to the generic filter.
return filter ( array , new Filter < T > ( ) { @ Override public boolean accept ( T t ) { return false == StrUtil . isBlank ( t ) ; } } ) ;
public class JNvgraph { /** * < pre > * nvGRAPH spectral clustering * given a graph and solver parameters of struct SpectralClusteringParameter , * assign vertices to groups such as * intra - group connections are strong and / or inter - groups connections are weak * using spectral technique . * < / pre > */ public static int nvgraphSpectralClustering ( nvgraphHandle handle , nvgraphGraphDescr graph_descr , long weight_index , SpectralClusteringParameter params , Pointer clustering , Pointer eig_vals , Pointer eig_vects ) { } }
// Thin JNI wrapper: forward all arguments to the native call and translate
// its status code through checkResult (which may throw on error).
return checkResult ( nvgraphSpectralClusteringNative ( handle , graph_descr , weight_index , params , clustering , eig_vals , eig_vects ) ) ;
public class BinaryJedis { /** * Increment the number stored at field in the hash at key by value . If key does not exist , a new * key holding a hash is created . If field does not exist or holds a string , the value is set to 0 * before applying the operation . Since the value argument is signed you can use this command to * perform both increments and decrements . * The range of values supported by HINCRBY is limited to 64 bit signed integers . * < b > Time complexity : < / b > O ( 1) * @ param key * @ param field * @ param value * @ return Integer reply The new value at field after the increment operation . */ @ Override public Long hincrBy ( final byte [ ] key , final byte [ ] field , final long value ) { } }
checkIsInMultiOrPipeline ( ) ; client . hincrBy ( key , field , value ) ; return client . getIntegerReply ( ) ;
public class AVUser { /** * signUpOrLoginByMobilePhone * @ param mobilePhoneNumber * @ param smsCode * @ param clazz * @ param < T > * @ return */ public static < T extends AVUser > T signUpOrLoginByMobilePhone ( String mobilePhoneNumber , String smsCode , Class < T > clazz ) { } }
// Synchronous facade over the reactive variant: block until the single
// resulting user of the requested subclass is emitted.
return signUpOrLoginByMobilePhoneInBackground ( mobilePhoneNumber , smsCode , clazz ) . blockingSingle ( ) ;
public class Searcher { /** * Resets the helper ' s state . * @ return this { @ link Searcher } for chaining . */ @ NonNull @ SuppressWarnings ( { } }
"WeakerAccess" , "unused" } ) // For library users
// Clears all pagination cursors, facet/numeric refinements and in-flight
// requests so the next search starts from a clean slate.
public Searcher reset ( ) { lastResponsePage = 0 ; lastRequestPage = 0 ; lastResponseId = 0 ; endReached = false ; clearFacetRefinements ( ) ; cancelPendingRequests ( ) ; numericRefinements . clear ( ) ; return this ;
public class Object2IntHashMap { /** * { @ inheritDoc } */ public void clear ( ) { } }
if ( size > 0 ) { Arrays . fill ( keys , null ) ; Arrays . fill ( values , missingValue ) ; size = 0 ; }
public class SftpFileAttributes { /** * Determine whether these attributes refer to a block special file . * @ return boolean */ public boolean isBlock ( ) { } }
if ( permissions != null && ( permissions . longValue ( ) & SftpFileAttributes . S_IFBLK ) == SftpFileAttributes . S_IFBLK ) { return true ; } return false ;
public class Boxing { /** * Transforms any array into an array of { @ code long } . * @ param src source array * @ param srcPos start position * @ param len length * @ return long array */ public static long [ ] unboxLongs ( Object src , int srcPos , int len ) { } }
// Normalize the untyped source through array(...) and delegate to the
// strongly-typed overload, which performs the actual per-element unboxing.
return unboxLongs ( array ( src ) , srcPos , len ) ;
public class Iteration { /** * Set the current { @ link Iteration } payload . */ public static void setCurrentPayload ( Variables stack , String name , WindupVertexFrame frame ) throws IllegalArgumentException { } }
Map < String , Iterable < ? extends WindupVertexFrame > > vars = stack . peek ( ) ; Iterable < ? extends WindupVertexFrame > existingValue = vars . get ( name ) ; if ( ! ( existingValue == null || existingValue instanceof IterationPayload ) ) { throw new IllegalArgumentException ( "Variable \"" + name + "\" has already been assigned and cannot be used as an " + Iteration . class . getSimpleName ( ) + " variable." ) ; } vars . put ( name , new IterationPayload < > ( frame ) ) ;
public class CommerceOrderPersistenceImpl { /** * Removes all the commerce orders where groupId = & # 63 ; and commerceAccountId = & # 63 ; and orderStatus = & # 63 ; from the database . * @ param groupId the group ID * @ param commerceAccountId the commerce account ID * @ param orderStatus the order status */ @ Override public void removeByG_C_O ( long groupId , long commerceAccountId , int orderStatus ) { } }
// Fetch every matching order (no pagination, no ordering) and remove each
// one individually so per-entity removal hooks run.
for ( CommerceOrder commerceOrder : findByG_C_O ( groupId , commerceAccountId , orderStatus , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ) { remove ( commerceOrder ) ; }
public class KeyManagementServiceClient { /** * Returns metadata for a given [ CryptoKeyVersion ] [ google . cloud . kms . v1 . CryptoKeyVersion ] . * < p > Sample code : * < pre > < code > * try ( KeyManagementServiceClient keyManagementServiceClient = KeyManagementServiceClient . create ( ) ) { * CryptoKeyVersionName name = CryptoKeyVersionName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ KEY _ RING ] " , " [ CRYPTO _ KEY ] " , " [ CRYPTO _ KEY _ VERSION ] " ) ; * CryptoKeyVersion response = keyManagementServiceClient . getCryptoKeyVersion ( name . toString ( ) ) ; * < / code > < / pre > * @ param name The [ name ] [ google . cloud . kms . v1 . CryptoKeyVersion . name ] of the * [ CryptoKeyVersion ] [ google . cloud . kms . v1 . CryptoKeyVersion ] to get . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final CryptoKeyVersion getCryptoKeyVersion ( String name ) { } }
// Convenience overload: wrap the resource name in a request proto and
// delegate to the request-based variant.
GetCryptoKeyVersionRequest request = GetCryptoKeyVersionRequest . newBuilder ( ) . setName ( name ) . build ( ) ; return getCryptoKeyVersion ( request ) ;
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EEnum getIfcTransformerTypeEnum ( ) { } }
// Generated EMF accessor: lazily resolve the EEnum from the registered
// package by its fixed classifier index (918). Do not hand-edit (@generated).
if ( ifcTransformerTypeEnumEEnum == null ) { ifcTransformerTypeEnumEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 918 ) ; } return ifcTransformerTypeEnumEEnum ;
public class CommerceVirtualOrderItemLocalServiceBaseImpl { /** * Returns the commerce virtual order item matching the UUID and group . * @ param uuid the commerce virtual order item ' s UUID * @ param groupId the primary key of the group * @ return the matching commerce virtual order item , or < code > null < / code > if a matching commerce virtual order item could not be found */ @ Override public CommerceVirtualOrderItem fetchCommerceVirtualOrderItemByUuidAndGroupId ( String uuid , long groupId ) { } }
// Thin delegation to the persistence layer's fetch-by-UUID-and-group finder.
return commerceVirtualOrderItemPersistence . fetchByUUID_G ( uuid , groupId ) ;
public class DeeseAntonymEvaluation { /** * Returns the association of the two words on a scale of 0 to 1. * @ return the assocation or { @ code null } if either { @ code word1 } or { @ code * word2 } are not in the semantic space */ protected Double computeAssociation ( SemanticSpace sspace , String word1 , String word2 ) { } }
Vector v1 = sspace . getVector ( word1 ) ; Vector v2 = sspace . getVector ( word2 ) ; if ( v1 == null || v2 == null ) return null ; // Find the ranks of each of the two words to each other double rank1 = findRank ( sspace , word1 , word2 ) ; double rank2 = findRank ( sspace , word2 , word1 ) ; return 2d / ( rank1 + rank2 ) ;
public class Matrix { /** * Set the value of element [ r , c ] . * @ param r the row index * @ param c the column index * @ param value the value * @ throws numbercruncher . MatrixException for an invalid index */ public void set ( int r , int c , double value ) throws MatrixException { } }
if ( ( r < 0 ) || ( r >= nRows ) || ( c < 0 ) || ( c >= nCols ) ) { throw new MatrixException ( MatrixException . INVALID_INDEX ) ; } values [ r ] [ c ] = value ;
public class Rule { /** * / * checks whether the Rule contains a detection point of the same type and threshold * as the detectionPoint parameter */ public boolean typeAndThresholdContainsDetectionPoint ( DetectionPoint detectionPoint ) { } }
for ( DetectionPoint myPoint : getAllDetectionPoints ( ) ) { if ( detectionPoint . typeAndThresholdMatches ( myPoint ) ) { return true ; } } return false ;
public class ByteUtilities { /** * Converts a hex string representation to a byte array . * @ param hex the string holding the hex values * @ return the resulting byte array */ public static byte [ ] asByteArray ( String hex ) { } }
byte [ ] bts = new byte [ hex . length ( ) / 2 ] ; for ( int i = 0 ; i < bts . length ; i ++ ) { bts [ i ] = ( byte ) Integer . parseInt ( hex . substring ( 2 * i , 2 * i + 2 ) , 16 ) ; } return bts ;
public class ComponentCollision { /** * TransformableListener */ @ Override public void notifyTransformed ( Transformable transformable ) { } }
// Spatial-hash maintenance on movement: compute the old and new grid cell
// ranges (world coords divided by REDUCE_FACTOR, floored) covering the
// collidable's bounding box. If the covered range changed, unregister the
// collidable from the old cells, then (re)register it on the current cells.
// Note: addPoints runs even when the range is unchanged -- presumably
// idempotent; confirm against addPoints' implementation.
final Collidable collidable = transformable . getFeature ( Collidable . class ) ; final double oldX = transformable . getOldX ( ) ; final double oldY = transformable . getOldY ( ) ; final int oldMinX = ( int ) Math . floor ( oldX / REDUCE_FACTOR ) ; final int oldMinY = ( int ) Math . floor ( oldY / REDUCE_FACTOR ) ; final int oldMaxX = ( int ) Math . floor ( ( oldX + collidable . getMaxWidth ( ) ) / REDUCE_FACTOR ) ; final int oldMaxY = ( int ) Math . floor ( ( oldY + collidable . getMaxHeight ( ) ) / REDUCE_FACTOR ) ; final double x = transformable . getX ( ) ; final double y = transformable . getY ( ) ; final int minX = ( int ) Math . floor ( x / REDUCE_FACTOR ) ; final int minY = ( int ) Math . floor ( y / REDUCE_FACTOR ) ; final int maxX = ( int ) Math . floor ( ( x + collidable . getMaxWidth ( ) ) / REDUCE_FACTOR ) ; final int maxY = ( int ) Math . floor ( ( y + collidable . getMaxHeight ( ) ) / REDUCE_FACTOR ) ; if ( oldMinX != minX || oldMinY != minY || oldMaxX != maxX || oldMaxY != maxY ) { removePoints ( oldMinX , oldMinY , oldMaxX , oldMaxY , collidable ) ; } addPoints ( minX , minY , maxX , maxY , collidable ) ;
public class OnLineStatistics { /** * Adds a data sample the the counts with the provided weight of influence . * @ param x the data value to add * @ param weight the weight to give the value * @ throws ArithmeticException if a negative weight is given */ public void add ( double x , double weight ) { } }
// See http : / / en . wikipedia . org / wiki / Algorithms _ for _ calculating _ variance if ( weight < 0 ) throw new ArithmeticException ( "Can not add a negative weight" ) ; else if ( weight == 0 ) return ; double n1 = n ; n += weight ; double delta = x - mean ; double delta_n = delta * weight / n ; double delta_n2 = delta_n * delta_n ; double term1 = delta * delta_n * n1 ; mean += delta_n ; m4 += term1 * delta_n2 * ( n * n - 3 * n + 3 ) + 6 * delta_n2 * m2 - 4 * delta_n * m3 ; m3 += term1 * delta_n * ( n - 2 ) - 3 * delta_n * m2 ; m2 += weight * delta * ( x - mean ) ; if ( min == null ) min = max = x ; else { min = Math . min ( min , x ) ; max = Math . max ( max , x ) ; }
public class ApplicationLauncherApp { /** * Requests that all active processes be killed . * @ param blockTimeMS If & gt ; 0 then it will block until all processes are killed for the specified number * of milliseconds */ public void killAllProcesses ( long blockTimeMS ) { } }
// Three phases: clear the GUI process tabs (on the EDT), request a kill on
// every tracked process, then optionally spin until all are inactive or the
// deadline passes.
// remove already dead processes from the GUI
SwingUtilities . invokeLater ( new Runnable ( ) { @ Override public void run ( ) { DefaultListModel model = ( DefaultListModel ) processList . getModel ( ) ; for ( int i = model . size ( ) - 1 ; i >= 0 ; i -- ) { ActiveProcess p = ( ActiveProcess ) model . get ( i ) ; removeProcessTab ( p , false ) ; } } } ) ;
// kill processes that are already running
synchronized ( processes ) { for ( int i = 0 ; i < processes . size ( ) ; i ++ ) { processes . get ( i ) . requestKill ( ) ; } }
// block until everything is dead
// NOTE(review): this wait loop busy-spins with no sleep; consider backing
// off between polls -- confirm intended behavior before changing.
if ( blockTimeMS > 0 ) { long abortTime = System . currentTimeMillis ( ) + blockTimeMS ; while ( abortTime > System . currentTimeMillis ( ) ) { int total = 0 ; synchronized ( processes ) { for ( int i = 0 ; i < processes . size ( ) ; i ++ ) { if ( ! processes . get ( i ) . isActive ( ) ) { total ++ ; } } if ( processes . size ( ) == total ) { break ; } } } }
public class StrUtils { /** * Replace all sub strings ignore case < br / > * replaceIgnoreCase ( " AbcDECd " , " Cd " , " FF " ) = " AbFFEFF " */ public static String replaceIgnoreCase ( String text , String findtxt , String replacetxt ) { } }
if ( text == null ) return null ; String str = text ; if ( findtxt == null || findtxt . length ( ) == 0 ) { return str ; } if ( findtxt . length ( ) > str . length ( ) ) { return str ; } int counter = 0 ; String thesubstr ; while ( ( counter < str . length ( ) ) && ( str . substring ( counter ) . length ( ) >= findtxt . length ( ) ) ) { thesubstr = str . substring ( counter , counter + findtxt . length ( ) ) ; if ( thesubstr . equalsIgnoreCase ( findtxt ) ) { str = str . substring ( 0 , counter ) + replacetxt + str . substring ( counter + findtxt . length ( ) ) ; counter += replacetxt . length ( ) ; } else { counter ++ ; } } return str ;
public class IotHubResourcesInner { /** * Get a list of the consumer groups in the Event Hub - compatible device - to - cloud endpoint in an IoT hub . * Get a list of the consumer groups in the Event Hub - compatible device - to - cloud endpoint in an IoT hub . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; EventHubConsumerGroupInfoInner & gt ; object */ public Observable < Page < EventHubConsumerGroupInfoInner > > listEventHubConsumerGroupsNextAsync ( final String nextPageLink ) { } }
// Unwrap the ServiceResponse envelope so callers observe only the page body.
return listEventHubConsumerGroupsNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < EventHubConsumerGroupInfoInner > > , Page < EventHubConsumerGroupInfoInner > > ( ) { @ Override public Page < EventHubConsumerGroupInfoInner > call ( ServiceResponse < Page < EventHubConsumerGroupInfoInner > > response ) { return response . body ( ) ; } } ) ;
public class AbstractCsvAnnotationBeanWriter { /** * Adds validators used to validate record values. * @ param validators implementation classes of { @ link CsvValidator } . */ @ SuppressWarnings ( "unchecked" ) public void addValidator ( CsvValidator < T > ... validators ) { } }
// Append all supplied validators to this writer's validator list.
this . validators . addAll ( Arrays . asList ( validators ) ) ;
public class H2ONode { /** * InetAddress we use to communicate to this Node . */ public static H2ONode self ( InetAddress local ) { } }
// Resolves which local NetworkInterface owns the given address, records it
// as the cloud multicast interface (warning when ambiguous), validates it,
// and finally interns this node's key. Requires H2O_PORT to be assigned.
assert H2O . H2O_PORT != 0 ; try {
// Figure out which interface matches our IP address
List < NetworkInterface > matchingIfs = new ArrayList < > ( ) ; Enumeration < NetworkInterface > netIfs = NetworkInterface . getNetworkInterfaces ( ) ; while ( netIfs . hasMoreElements ( ) ) { NetworkInterface netIf = netIfs . nextElement ( ) ; Enumeration < InetAddress > addrs = netIf . getInetAddresses ( ) ; while ( addrs . hasMoreElements ( ) ) { InetAddress addr = addrs . nextElement ( ) ; if ( addr . equals ( local ) ) { matchingIfs . add ( netIf ) ; break ; } } }
// 0 matches -> no multicast interface; >1 matches -> warn and pick the first.
switch ( matchingIfs . size ( ) ) { case 0 : H2O . CLOUD_MULTICAST_IF = null ; break ; case 1 : H2O . CLOUD_MULTICAST_IF = matchingIfs . get ( 0 ) ; break ; default : String msg = "Found multiple network interfaces for ip address " + local ; for ( NetworkInterface ni : matchingIfs ) { msg += "\n\t" + ni ; } msg += "\nUsing " + matchingIfs . get ( 0 ) + " for UDP broadcast" ; Log . warn ( msg ) ; H2O . CLOUD_MULTICAST_IF = matchingIfs . get ( 0 ) ; } } catch ( SocketException e ) { throw Log . throwErr ( e ) ; }
// Selected multicast interface must support multicast , and be up and running !
try { if ( H2O . CLOUD_MULTICAST_IF != null && ! H2O . CLOUD_MULTICAST_IF . supportsMulticast ( ) ) { Log . info ( "Selected H2O.CLOUD_MULTICAST_IF: " + H2O . CLOUD_MULTICAST_IF + " doesn't support multicast" ) ;
// H2O . CLOUD _ MULTICAST _ IF = null ;
} if ( H2O . CLOUD_MULTICAST_IF != null && ! H2O . CLOUD_MULTICAST_IF . isUp ( ) ) { throw new RuntimeException ( "Selected H2O.CLOUD_MULTICAST_IF: " + H2O . CLOUD_MULTICAST_IF + " is not up and running" ) ; } } catch ( SocketException e ) { throw Log . throwErr ( e ) ; } return intern ( new H2Okey ( local , H2O . H2O_PORT ) , H2O . calculateNodeTimestamp ( ) ) ;
public class ns_image { /** * < pre > * Converts API response of bulk operation into object and returns the object array in case of get request . * < / pre > */ protected base_resource [ ] get_nitro_bulk_response ( nitro_service service , String response ) throws Exception { } }
// Parse the bulk JSON payload into the typed response wrapper; on an error
// code, clear a stale session if needed and raise a nitro_exception carrying
// the partial response array. Otherwise unwrap the first ns_image from each
// per-item response into a flat result array.
ns_image_responses result = ( ns_image_responses ) service . get_payload_formatter ( ) . string_to_resource ( ns_image_responses . class , response ) ; if ( result . errorcode != 0 ) { if ( result . errorcode == SESSION_NOT_EXISTS ) service . clear_session ( ) ; throw new nitro_exception ( result . message , result . errorcode , ( base_response [ ] ) result . ns_image_response_array ) ; } ns_image [ ] result_ns_image = new ns_image [ result . ns_image_response_array . length ] ; for ( int i = 0 ; i < result . ns_image_response_array . length ; i ++ ) { result_ns_image [ i ] = result . ns_image_response_array [ i ] . ns_image [ 0 ] ; } return result_ns_image ;
public class AcpOrd { /** * < p > It half - checks items . Order can has items from several places . < / p > * @ param pRqVs additional request scoped parameters * @ param pOrds S . E . orders * @ return consolidated order with bookable items * @ throws Exception - an exception if checking fail */ public final CuOrSe checkSe1 ( final Map < String , Object > pRqVs , final List < CuOrSe > pOrds ) throws Exception { } }
// Availability half-check for S.E. orders: (1) collect all order IDs into a
// comma list; (2) configure ORM field sets / deep levels in pRqVs, query all
// good lines for those orders, fail if any has zero available quantity, link
// each line back to its owning order and copy it into a consolidated list;
// (3) repeat the same for service lines (with dt1/dt2); (4) strip the
// temporary pRqVs entries and return a synthetic CuOrSe holding the
// consolidated goods and services. The pRqVs keys appear to be consumed by
// srvOrm's retrieval -- confirm against the ORM service contract.
StringBuffer ordIds = null ; for ( CuOrSe co : pOrds ) { co . setGoods ( new ArrayList < CuOrSeGdLn > ( ) ) ; co . setServs ( new ArrayList < CuOrSeSrLn > ( ) ) ; if ( ordIds == null ) { ordIds = new StringBuffer ( ) ; ordIds . append ( co . getItsId ( ) . toString ( ) ) ; } else { ordIds . append ( "," + co . getItsId ( ) ) ; } } Set < String > ndFlNm = new HashSet < String > ( ) ; ndFlNm . add ( "itsId" ) ; ndFlNm . add ( "itsName" ) ; Set < String > ndFl = new HashSet < String > ( ) ; ndFl . add ( "itsId" ) ; ndFl . add ( "itsOwner" ) ; ndFl . add ( "itsName" ) ; ndFl . add ( "good" ) ; ndFl . add ( "uom" ) ; ndFl . add ( "quant" ) ; ndFl . add ( "price" ) ; ndFl . add ( "tot" ) ; ndFl . add ( "totTx" ) ; String tbn = CuOrSeGdLn . class . getSimpleName ( ) ; String tbnUom = UnitOfMeasure . class . getSimpleName ( ) ; pRqVs . put ( tbn + "neededFields" , ndFl ) ; pRqVs . put ( tbn + "gooddeepLevel" , 1 ) ; pRqVs . put ( tbn + "itsOwnerdeepLevel" , 1 ) ; pRqVs . put ( tbnUom + "neededFields" , ndFlNm ) ; List < CuOrSeGdLn > allGoods = new ArrayList < CuOrSeGdLn > ( ) ; List < CuOrSeSrLn > allServs = new ArrayList < CuOrSeSrLn > ( ) ; String quer = lazyGetQuOrGdChk ( ) . replace ( ":TORLN" , "CUORSEGDLN" ) . replace ( ":TITPL" , "SEGOODSPLACE" ) . replace ( ":ORIDS" , ordIds . toString ( ) ) ; List < CuOrSeGdLn > allGds = this . srvOrm . retrieveListByQuery ( pRqVs , CuOrSeGdLn . class , quer ) ; for ( CuOrSeGdLn gl : allGds ) { if ( gl . getQuant ( ) . compareTo ( BigDecimal . ZERO ) == 0 ) { throw new Exception ( "S.E.Good is not available #" + gl . getGood ( ) . getItsId ( ) ) ; } } for ( CuOrSeGdLn gl : allGds ) { for ( CuOrSe co : pOrds ) { if ( co . getItsId ( ) . equals ( gl . getItsOwner ( ) . getItsId ( ) ) ) { gl . setItsOwner ( co ) ; co . getGoods ( ) . add ( gl ) ; } } CuOrSeGdLn cgl = new CuOrSeGdLn ( ) ; cgl . setItsId ( gl . getItsId ( ) ) ; cgl . setGood ( gl . getGood ( ) ) ; cgl . setQuant ( gl . getQuant ( ) ) ; allGoods .
add ( cgl ) ; } pRqVs . remove ( tbn + "gooddeepLevel" ) ; pRqVs . remove ( tbn + "neededFields" ) ; pRqVs . remove ( tbn + "itsOwnerdeepLevel" ) ; ndFl . remove ( "good" ) ; ndFl . add ( "service" ) ; ndFl . add ( "dt1" ) ; ndFl . add ( "dt2" ) ; tbn = CuOrSeSrLn . class . getSimpleName ( ) ; pRqVs . put ( tbn + "neededFields" , ndFl ) ; pRqVs . put ( tbn + "servicedeepLevel" , 1 ) ; pRqVs . put ( tbn + "itsOwnerdeepLevel" , 1 ) ;
// non - bookable service checkout and bookable services half - checkout :
quer = lazyGetQuOrSrChk ( ) . replace ( ":TORLN" , "CUORSESRLN" ) . replace ( ":TITPL" , "SESERVICEPLACE" ) . replace ( ":ORIDS" , ordIds . toString ( ) ) ; List < CuOrSeSrLn > allSrvs = this . srvOrm . retrieveListByQuery ( pRqVs , CuOrSeSrLn . class , quer ) ; for ( CuOrSeSrLn sl : allSrvs ) { if ( sl . getQuant ( ) . compareTo ( BigDecimal . ZERO ) == 0 ) { throw new Exception ( "Service is not available #" + sl . getService ( ) . getItsId ( ) ) ; } } for ( CuOrSeSrLn sl : allSrvs ) { for ( CuOrSe co : pOrds ) { if ( co . getItsId ( ) . equals ( sl . getItsOwner ( ) . getItsId ( ) ) ) { sl . setItsOwner ( co ) ; co . getServs ( ) . add ( sl ) ; } } CuOrSeSrLn csl = new CuOrSeSrLn ( ) ; csl . setItsId ( sl . getItsId ( ) ) ; csl . setService ( sl . getService ( ) ) ; csl . setQuant ( sl . getQuant ( ) ) ; csl . setDt1 ( sl . getDt1 ( ) ) ; csl . setDt2 ( sl . getDt2 ( ) ) ; allServs . add ( csl ) ; } pRqVs . remove ( tbn + "servicedeepLevel" ) ; pRqVs . remove ( tbn + "neededFields" ) ; pRqVs . remove ( tbn + "itsOwnerdeepLevel" ) ; pRqVs . remove ( tbnUom + "neededFields" ) ; CuOrSe cor = new CuOrSe ( ) ; cor . setGoods ( allGoods ) ; cor . setServs ( allServs ) ; return cor ;
public class Topicspace {
  /**
   * Deletes the local durable subscription identified by the given runtime id.
   *
   * @see com.ibm.ws.sib.processor.runtime.SIMPTopicSpaceControllable#deleteLocalSubscriptionControlByID(java.lang.String)
   * @param id the runtime id of the subscription to delete
   * @throws SIMPInvalidRuntimeIDException if the id is not a valid runtime id
   * @throws SIMPControllableNotFoundException if no such subscription exists
   * @throws SIMPIncorrectCallException if the subscription is not durable
   */
  public void deleteLocalSubscriptionControlByID ( String id ) throws SIMPInvalidRuntimeIDException , SIMPControllableNotFoundException , SIMPException , SIDurableSubscriptionNotFoundException , SIDestinationLockedException , SIResourceException , SIIncorrectCallException { } }
// Trace entry, guarded so tracing work only happens when enabled.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) )
  SibTr . entry ( tc , "deleteLocalSubscriptionControlByID" , id ) ;
// Fail fast if the underlying message handler has been marked corrupt.
assertMessageHandlerNotCorrupt ( ) ;
// NOTE(review): sub is dereferenced unconditionally below — confirm
// getSubscription(id) never returns null for an unknown id (otherwise NPE).
ControllableSubscription sub = getSubscription ( id ) ;
// Only durable subscriptions may be deleted through this control interface.
if ( ! sub . isDurable ( ) ) {
  if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) )
    SibTr . exit ( tc , "deleteLocalSubscriptionControlByID" , "SIMPIncorrectCallException" ) ;
  throw new SIMPIncorrectCallException ( nls . getFormattedMessage ( "SUBSCRIPTION_DELETE_ERROR_CWSIP0272" , new Object [ ] { id } , null ) ) ;
}
HashMap durableSubs = destinationManager . getDurableSubscriptionsTable ( ) ;
// The durable-subscriptions table is shared state: serialize the
// lookup-then-delete sequence on it.
synchronized ( durableSubs ) {
  // Look up the consumer dispatcher for this subId in the system durable subs list
  ConsumerDispatcher cd = ( ConsumerDispatcher ) durableSubs . get ( sub . getConsumerDispatcherState ( ) . getSubscriberID ( ) ) ;
  // Does the subscription exist , if it doesn't , throw a
  // SIMPControllableNotFoundException
  if ( cd == null ) {
    // NOTE(review): the exit trace label is "deleteSubscription", not this
    // method's name — looks like a copy/paste slip in the trace label; confirm.
    if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) )
      SibTr . exit ( tc , "deleteSubscription" , "SIMPControllableNotFoundException" ) ;
    throw new SIMPControllableNotFoundException ( nls . getFormattedMessage ( "SUBSCRIPTION_DOESNT_EXIST_ERROR_CWSIP0146" , new Object [ ] { sub . getConsumerDispatcherState ( ) . getSubscriberID ( ) , messageProcessor . getMessagingEngineName ( ) } , null ) ) ;
  }
  // Obtain the destination from the queueing points
  DestinationHandler destination = cd . getDestination ( ) ;
  // Call the deleteDurableSubscription method on the destination
  // NOTE : this assumes the durable subscription is always local
  destination . deleteDurableSubscription ( sub . getConsumerDispatcherState ( ) . getSubscriberID ( ) , messageProcessor . getMessagingEngineName ( ) ) ;
}
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) )
  SibTr . exit ( tc , "deleteLocalSubscriptionControlByID" ) ;
public class Msg {
  /**
   * Puts a string into the message, prefixed with its length.
   *
   * <p>Users shall size the message by adding 1 to the length of the string:
   * it needs to be able to accommodate (data.length + 1) more bytes.
   *
   * @param data a string shorter than 256 characters; if null, the call is a no-op
   * @return the same message, for call chaining
   */
  public Msg putShortString ( String data ) { } }
if ( data == null ) { return this ; } ByteBuffer dup = buf . duplicate ( ) ; dup . position ( writeIndex ) ; writeIndex += Wire . putShortString ( dup , data ) ; return this ;
public class XMLStreamEventsAsync {
  /**
   * Shortcut to move forward to the next START_ELEMENT event.
   *
   * @return a synchronization point unblocked once a START_ELEMENT has been
   *         reached, or carrying the error raised while reading events
   */
  public ISynchronizationPoint < Exception > nextStartElement ( ) { } }
// Advance one event; if it completed synchronously, handle it inline.
ISynchronizationPoint < Exception > next = next ( ) ;
if ( next . isUnblocked ( ) ) {
  // Propagate a read error as-is.
  if ( next . hasError ( ) ) return next ;
  // Done if the event just read is a start element.
  if ( Type . START_ELEMENT . equals ( event . type ) ) return next ;
  // Otherwise keep scanning synchronously.
  return nextStartElement ( ) ;
}
// Asynchronous path: wait for the pending read, then continue scanning and
// forward the outcome (including errors, via the second listenInline arg).
SynchronizationPoint < Exception > sp = new SynchronizationPoint < > ( ) ;
next . listenInline ( ( ) -> {
  if ( Type . START_ELEMENT . equals ( event . type ) ) {
    sp . unblock ( ) ;
    return ;
  }
  // Continue via a Next task — presumably to avoid growing the listener
  // call stack while skipping many events; TODO confirm against Next's docs.
  new Next ( sp ) {
    @ Override
    protected void onNext ( ) {
      if ( Type . START_ELEMENT . equals ( event . type ) )
        sp . unblock ( ) ;
      else
        // Restart the scan and chain its result into sp.
        nextStartElement ( ) . listenInline ( sp ) ;
    }
  } . start ( ) ;
} , sp ) ;
return sp ;
public class WhiteBlackList {
    /**
     * Appends the given collection to {@code buf} as a JSON-style list of
     * double-quoted strings, e.g. {@code ["a", "b"]}, in the collection's own
     * iteration order.
     *
     * <p>Fix: embedded backslashes are now escaped as {@code \\} in addition to
     * embedded double quotes ({@code \"}). Previously an item ending in a
     * backslash rendered as {@code "x\"} — the closing quote appeared escaped,
     * making the output ambiguous and unparseable as JSON.
     *
     * @param coll the items to render
     * @param buf the buffer the rendered list is appended to
     */
    private static void quoteList(final Collection<String> coll, final StringBuilder buf) {
        buf.append('[');
        boolean first = true;
        for (final String item : coll) {
            if (first) {
                first = false;
            } else {
                buf.append(", ");
            }
            buf.append('"');
            for (int i = 0; i < item.length(); i++) {
                final char c = item.charAt(i);
                if (c == '\\') {
                    // Escape backslashes so they cannot swallow the closing quote.
                    buf.append("\\\\");
                } else if (c == '"') {
                    buf.append("\\\"");
                } else {
                    buf.append(c);
                }
            }
            buf.append('"');
        }
        buf.append(']');
    }
}
public class DirectConnectGatewayAssociationProposal {
  /**
   * Sets the existing Amazon VPC prefixes advertised to the Direct Connect gateway.
   *
   * @param existingAllowedPrefixesToDirectConnectGateway
   *        the existing Amazon VPC prefixes advertised to the Direct Connect
   *        gateway; a null argument clears the property
   */
  public void setExistingAllowedPrefixesToDirectConnectGateway ( java . util . Collection < RouteFilterPrefix > existingAllowedPrefixesToDirectConnectGateway ) { } }
if ( existingAllowedPrefixesToDirectConnectGateway == null ) { this . existingAllowedPrefixesToDirectConnectGateway = null ; return ; } this . existingAllowedPrefixesToDirectConnectGateway = new com . amazonaws . internal . SdkInternalList < RouteFilterPrefix > ( existingAllowedPrefixesToDirectConnectGateway ) ;
public class BackupManagerImpl {
  /**
   * {@inheritDoc}
   *
   * <p>Restores a repository from the given backup chain log. When
   * {@code repositoryEntry} is null, the original repository entry recorded
   * in the backup log is used instead.
   */
  public void restore ( RepositoryBackupChainLog log , RepositoryEntry repositoryEntry , boolean asynchronous , boolean removeJobOnceOver ) throws BackupOperationException , BackupConfigurationException , RepositoryException , RepositoryConfigurationException { } }
// No explicit target entry: fall back to the original repository entry
// recorded in the backup log itself.
if ( repositoryEntry == null ) {
  if ( log . getOriginalRepositoryEntry ( ) == null ) {
    // Fix: the error message previously read "is not contains original
    // repository log", which is broken English; corrected for clarity.
    throw new RepositoryRestoreExeption (
        "The backup log does not contain the original repository entry : " + log . getLogFilePath ( ) ) ;
  }
  // NOTE(review): this delegation drops removeJobOnceOver — confirm the
  // three-argument restore overload applies the intended default for it.
  this . restore ( log , log . getOriginalRepositoryEntry ( ) , asynchronous ) ;
  return ;
}
// Explicit entry supplied: restore directly (null = no workspace mapping).
this . restoreRepository ( log , repositoryEntry , null , asynchronous , removeJobOnceOver ) ;
public class AdminSchedulerAction {
  /**
   * Registers data used when rendering HTML, adding the scheduler page's
   * online-help link on top of the base class data.
   *
   * @param runtime the action runtime to register rendering data on
   */
  @ Override
  protected void setupHtmlData ( final ActionRuntime runtime ) { } }
super . setupHtmlData ( runtime ) ; runtime . registerData ( "helpLink" , systemHelper . getHelpLink ( fessConfig . getOnlineHelpNameScheduler ( ) ) ) ;
public class MetricsClient {
  /**
   * Creates or updates a logs-based metric.
   *
   * <p>Sample code:
   * <pre><code>
   * try (MetricsClient metricsClient = MetricsClient.create()) {
   *   MetricName metricName = ProjectMetricName.of("[PROJECT]", "[METRIC]");
   *   LogMetric metric = LogMetric.newBuilder().build();
   *   LogMetric response = metricsClient.updateLogMetric(metricName, metric);
   * }
   * </code></pre>
   *
   * @param metricName the resource name of the metric to update:
   *        "projects/[PROJECT_ID]/metrics/[METRIC_ID]". The updated metric must
   *        be provided in the request and its {@code name} field must be the
   *        same as [METRIC_ID]. If the metric does not exist in [PROJECT_ID],
   *        then a new metric is created.
   * @param metric the updated metric
   * @return the updated (or newly created) metric
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final LogMetric updateLogMetric ( MetricName metricName , LogMetric metric ) { } }
// Convert the typed metric name to its string form (null stays null), then
// delegate to the request-based overload.
final String name = ( metricName == null ) ? null : metricName . toString ( ) ;
return updateLogMetric (
    UpdateLogMetricRequest . newBuilder ( )
        . setMetricName ( name )
        . setMetric ( metric )
        . build ( ) ) ;
public class SecurityInterceptor {
  /**
   * Intercepts execution for event handling. Checks whether the security
   * manager allows access before allowing the event to proceed; anything other
   * than an explicit allow is treated as a denial.
   *
   * @param executionContext the context of the execution being intercepted
   * @return the resulting {@link net.sourceforge.stripes.action.Resolution};
   *         {@link ExecutionContext#proceed()} when access is allowed
   * @throws Exception on error
   */
  protected Resolution interceptEventHandling ( ExecutionContext executionContext ) throws Exception { } }
// Before handling the event , check if access is allowed . // If not explicitly allowed , access is denied . Resolution resolution ; if ( Boolean . TRUE . equals ( getAccessAllowed ( executionContext ) ) ) { resolution = executionContext . proceed ( ) ; } else { LOG . debug ( "The security manager has denied access." ) ; resolution = handleAccessDenied ( executionContext . getActionBean ( ) , executionContext . getHandler ( ) ) ; } return resolution ;
public class HttpMessage {
  /**
   * Sets a request attribute, lazily creating the attribute map on first use.
   *
   * @param name attribute name
   * @param attribute attribute value
   * @return the previous attribute value bound to the name, or null if none
   */
  public Object setAttribute ( String name , Object attribute ) { } }
// Lazily allocate the attribute map on first use (same initial capacity of 11
// as before, to preserve the original allocation behavior).
if ( _attributes == null ) {
  _attributes = new HashMap ( 11 ) ;
}
// Map.put returns the value previously bound to the name, or null.
return _attributes . put ( name , attribute ) ;
public class MavenModelScannerPlugin {
  /**
   * Adds information about plugin executions: one execution descriptor per
   * declared execution, including its goals and XML configuration.
   *
   * @param mavenPluginDescriptor the descriptor for the plugin
   * @param plugin the Maven plugin whose executions are scanned
   * @param store the database store used to create descriptor nodes
   */
  private void addPluginExecutions ( MavenPluginDescriptor mavenPluginDescriptor , Plugin plugin , Store store ) { } }
// One descriptor node per declared execution, wired to the plugin descriptor.
for ( final PluginExecution execution : plugin . getExecutions ( ) ) {
  final MavenPluginExecutionDescriptor descriptor = store . create ( MavenPluginExecutionDescriptor . class ) ;
  descriptor . setId ( execution . getId ( ) ) ;
  descriptor . setPhase ( execution . getPhase ( ) ) ;
  descriptor . setInherited ( execution . isInherited ( ) ) ;
  mavenPluginDescriptor . getExecutions ( ) . add ( descriptor ) ;
  // Attach the execution's goals and its raw XML configuration.
  addExecutionGoals ( descriptor , execution , store ) ;
  addConfiguration ( descriptor , ( Xpp3Dom ) execution . getConfiguration ( ) , store ) ;
}
public class JtsBinaryParser {
  /**
   * Parses the given {@link org.postgis.binary.ValueGetter} into a JTS
   * {@link org.locationtech.jts.geom.MultiPolygon}.
   *
   * @param data the {@link org.postgis.binary.ValueGetter} to parse
   * @param srid SRID assigned to the parsed geometries
   * @return the parsed {@link org.locationtech.jts.geom.MultiPolygon}
   */
  private MultiPolygon parseMultiPolygon ( ValueGetter data , int srid ) { } }
// The first int in the stream is the number of member polygons; parse them
// all, then hand the array to the shared geometry factory.
final Polygon [ ] polygons = new Polygon [ data . getInt ( ) ] ;
parseGeometryArray ( data , polygons , srid ) ;
return JtsGeometry . geofac . createMultiPolygon ( polygons ) ;
public class WebGroup {
  /**
   * Looks up the MIME type for a file extension by delegating to the parent
   * {@code VirtualHost}.
   *
   * <p>NOTE(review): the original javadoc documented a nonexistent
   * {@code file} parameter; corrected to the actual parameters. The naming
   * suggests the extension with and without its leading dot — confirm against
   * {@code VirtualHost.getMimeType}.
   *
   * @param withDot presumably the extension including the leading dot
   * @param withoutDot presumably the extension without the leading dot
   * @return the MIME type, as resolved by the parent virtual host
   */
  public String getMimeType ( String withDot , String withoutDot ) { } }
return ( ( VirtualHost ) parent ) . getMimeType ( withDot , withoutDot ) ;
public class Template {
  /**
   * Sets the attributes for this template, binding each attribute to its
   * configuration name.
   *
   * @param attributes the attribute map; every value must be an
   *        {@link Attribute} instance (nulls and foreign values are rejected
   *        with an IllegalArgumentException)
   */
  public final void setAttributes ( final Map < String , Attribute > attributes ) { } }
for ( Map . Entry < String , Attribute > entry : attributes . entrySet ( ) ) { Object attribute = entry . getValue ( ) ; if ( ! ( attribute instanceof Attribute ) ) { final String msg = "Attribute: '" + entry . getKey ( ) + "' is not an attribute. It is a: " + attribute ; LOGGER . error ( "Error setting the Attributes: {}" , msg ) ; throw new IllegalArgumentException ( msg ) ; } else { ( ( Attribute ) attribute ) . setConfigName ( entry . getKey ( ) ) ; } } this . attributes = attributes ;