signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class WebUtils {

    /**
     * Put initial http request post parameters.
     * Copies the raw parameter map of the current HTTP request into the
     * webflow flash scope under the key "httpRequestInitialPostParameters",
     * so later flow states can still see the original POST parameters.
     *
     * @param context the context
     */
    public static void putInitialHttpRequestPostParameters(final RequestContext context) {
        val request = getHttpServletRequestFromExternalWebflowContext(context);
        context.getFlashScope().put("httpRequestInitialPostParameters", request.getParameterMap());
    }
}
public class MMDCfgPanel { /** * GEN - LAST : event _ buttonResetSettingsActionPerformed */ private void buttonExportSettingsActionPerformed ( java . awt . event . ActionEvent evt ) { } }
// GEN - FIRST : event _ buttonExportSettingsActionPerformed File file = DialogProviderManager . getInstance ( ) . getDialogProvider ( ) . msgSaveFileDialog ( null , "exportSettings" , "Export settings" , lastExportedSettingsFile , true , new PropertiesFileFilter ( ) , "Save" ) ; if ( file != null ) { lastExportedSettingsFile = file ; if ( ! file . getName ( ) . toLowerCase ( Locale . ENGLISH ) . endsWith ( ".properties" ) ) { final Boolean addExt = DialogProviderManager . getInstance ( ) . getDialogProvider ( ) . msgConfirmYesNoCancel ( null , "Add extension" , "Add '.properties' extension?" ) ; if ( addExt == null ) { return ; } if ( addExt ) { file = new File ( file . getAbsolutePath ( ) + ".properties" ) ; } } if ( file . exists ( ) && ! DialogProviderManager . getInstance ( ) . getDialogProvider ( ) . msgConfirmOkCancel ( null , "Override file" , String . format ( "File %s exists, to override it?" , file . getName ( ) ) ) ) { return ; } final PropertiesPreferences prefs = new PropertiesPreferences ( "NB MindMap plugin" ) ; final MindMapPanelConfig cfg = store ( prefs , new MindMapPanelConfig ( ) , false ) ; cfg . saveTo ( prefs ) ; try { FileUtils . write ( file , prefs . toString ( ) , "UTF-8" ) ; } catch ( final Exception ex ) { LOGGER . error ( "Can't export settings" , ex ) ; DialogProviderManager . getInstance ( ) . getDialogProvider ( ) . msgError ( null , "Can't export settings [" + ex . getMessage ( ) + ']' ) ; } }
public class SizeLimitableBlockingQueue { /** * / * ( non - Javadoc ) * @ see java . util . Collection # retainAll ( java . util . Collection ) */ @ Override public boolean retainAll ( Collection < ? > c ) { } }
boolean b = queue . retainAll ( c ) ; if ( b ) { signalSizeReduced ( ) ; } return b ;
public class BasicRecordStoreLoader {

    /**
     * Loads the provided keys from the underlying map store
     * and transforms them to a list of alternating serialised key-value pairs.
     *
     * @param keys the keys for which values are loaded
     * @return the list of loaded key-values
     * @see com.hazelcast.core.MapLoader#loadAll(Collection)
     */
    private List<Data> loadAndGet(List<Data> keys) {
        try {
            Map entries = mapDataStore.loadAll(keys);
            return getKeyValueSequence(entries);
        } catch (Throwable t) {
            // log, then rethrow (possibly wrapped) so the caller sees the failure
            logger.warning("Could not load keys from map store", t);
            throw ExceptionUtil.rethrow(t);
        }
    }
}
public class LessParser {

    /**
     * Concatenate 2 expressions to one expression.
     * If either side is already an {@link Operation} with the same operator,
     * the other operand is folded into it instead of nesting a new node,
     * keeping same-operator chains flat.
     *
     * @param left the left, can be null
     * @param operator the expression operation
     * @param right the right, can not be null
     * @return the resulting expression
     */
    private Expression concat(Expression left, char operator, Expression right) {
        if (left == null) {
            return right;
        }
        Operation op;
        if (left.getClass() == Operation.class && ((Operation) left).getOperator() == operator) {
            // left is already a chain with this operator: append to it below
            op = (Operation) left;
        } else if (right != null && right.getClass() == Operation.class
                && ((Operation) right).getOperator() == operator) {
            // right is a chain with this operator: prepend left and return early
            op = (Operation) right;
            op.addLeftOperand(left);
            return op;
        } else {
            // neither side reusable: start a new operation node
            op = new Operation(reader, left, operator);
        }
        if (right != null) {
            op.addOperand(right);
        }
        return op;
    }
}
public class PTSaxton2006 {

    /**
     * Equation 15 for calculating Slope of logarithmic tension-moisture curve.
     * Computes (ln(theta33) - ln(theta1500)) / (ln(1500) - ln(33)) using
     * string-based decimal arithmetic helpers; moisture values are converted
     * from percent to fraction first.
     * (Note: method name "calcLamda" is an established typo for "lambda";
     * kept for API compatibility.)
     *
     * @param slsnd Sand weight percentage by layer ([0,100]%)
     * @param slcly Clay weight percentage by layer ([0,100]%)
     * @param omPct Organic matter weight percentage by layer ([0,100]%), (= SLOC * 1.72)
     */
    private static String calcLamda(String slsnd, String slcly, String omPct) {
        // percent -> volumetric fraction
        String mt33 = divide(calcMoisture33Kpa(slsnd, slcly, omPct), "100");
        String mt1500 = divide(calcMoisture1500Kpa(slsnd, slcly, omPct), "100");
        String ret = divide(substract(log(mt33), log(mt1500)), CONST_LN1500_LN33);
        LOG.debug("Calculate result for Slope of logarithmic tension-moisture curve is {}", ret);
        return ret;
    }
}
public class JSON { /** * Parse a string of JSON text into a JSONArtifact . * @ param str The String to read from . * @ param order Boolean flag indicating if the order of the JSON data should be preserved . This parameter only has an effect if the stream is JSON Object { . . . } formatted data . * @ return Returns an instance of JSONArtifact ( JSONObject or JSONArray ) , corrisponding to if the input stream was Object or Array notation . * @ throws IOException Thrown on IO errors during parse . * @ throws NullPointerException Thrown if str is null */ public static JSONArtifact parse ( String str , boolean order ) throws IOException , NullPointerException { } }
if ( str != null ) { return parse ( new StringReader ( str ) , order ) ; } else { throw new NullPointerException ( "str cannot be null" ) ; }
public class ApiOvhOrder {

    /**
     * Create order.
     * REST: POST /order/dedicated/server/{serviceName}/traffic/{duration}
     * Builds the resource path, sends the traffic amount in the request body,
     * and deserializes the response into an {@link OvhOrder}.
     *
     * @param traffic [required] amount of traffic to allocate
     * @param serviceName [required] The internal name of your dedicated server
     * @param duration [required] Duration
     */
    public OvhOrder dedicated_server_serviceName_traffic_duration_POST(String serviceName, String duration, OvhTrafficOrderEnum traffic) throws IOException {
        String qPath = "/order/dedicated/server/{serviceName}/traffic/{duration}";
        // substitute path placeholders with the supplied values
        StringBuilder sb = path(qPath, serviceName, duration);
        HashMap<String, Object> o = new HashMap<String, Object>();
        addBody(o, "traffic", traffic);
        String resp = exec(qPath, "POST", sb.toString(), o);
        return convertTo(resp, OvhOrder.class);
    }
}
public class CloudhopperBuilder { /** * The SMPP server host ( IP or address ) . * You can specify a direct value . For example : * < pre > * . host ( " localhost " ) ; * < / pre > * You can also specify one or several property keys . For example : * < pre > * . host ( " $ { custom . property . high - priority } " , " $ { custom . property . low - priority } " ) ; * < / pre > * The properties are not immediately evaluated . The evaluation will be done * when the { @ link # build ( ) } method is called . * If you provide several property keys , evaluation will be done on the * first key and if the property exists ( see { @ link EnvironmentBuilder } ) , * its value is used . If the first property doesn ' t exist in properties , * then it tries with the second one and so on . * @ param host * one value , or one or several property keys * @ return this instance for fluent chaining */ public CloudhopperBuilder host ( String ... host ) { } }
for ( String h : host ) { if ( h != null ) { hosts . add ( h ) ; } } return this ;
public class FileUtils {

    /**
     * Returns the path of the file in the same manner as {@link java.io.File#getPath()}.
     * The input is trimmed and a single trailing separator is stripped.
     *
     * @param file The file
     * @return The path, or the empty string if the file is null or blank
     *         (note: returns {@code StringUtils.EMPTY}, not null)
     */
    public static String path(String file) {
        if (StringUtils.isNullOrBlank(file)) {
            return StringUtils.EMPTY;
        }
        file = StringUtils.trim(file);
        int pos = indexOfLastSeparator(file);
        // drop a trailing separator ("dir/" -> "dir"); otherwise return as-is
        return pos == file.length() - 1 ? file.substring(0, file.length() - 1) : file;
    }
}
public class Settings { /** * Public API . */ public static Settings load ( Map < String , String > config ) { } }
Map < Entry , String > rslt = new HashMap < Entry , String > ( ) ; Log log = null ; for ( Entry y : Entry . values ( ) ) { // Explicitly defined values trump defaults . . . String specValue = config . get ( y . getName ( ) ) ; if ( specValue != null ) { if ( log == null ) { log = LogFactory . getLog ( Settings . class ) ; // Don ' t declare as static in general libraries } if ( log . isTraceEnabled ( ) ) { StringBuffer msg = new StringBuffer ( ) ; msg . append ( "Default value for setting '" ) . append ( y . name ( ) ) . append ( "' was overridden with value '" ) . append ( specValue ) . append ( "'" ) ; log . trace ( msg . toString ( ) ) ; } } rslt . put ( y , specValue != null ? specValue : y . getDefaultValue ( ) ) ; } return new Settings ( rslt ) ;
public class Reflection {

    /**
     * Sets the value of a field of an object instance via reflection.
     * Searches the class and its superclasses; any failure is wrapped in a
     * RuntimeException.
     * NOTE(review): a missing field is reported via NoSuchMethodException
     * (not NoSuchFieldException) — odd but long-standing; callers only see it
     * as the RuntimeException cause, so it is left unchanged here.
     *
     * @param instance to inspect
     * @param fieldName name of field to set
     * @param value the value to set
     */
    public static void setValue(Object instance, String fieldName, Object value) {
        try {
            Field f = findFieldRecursively(instance.getClass(), fieldName);
            if (f == null)
                throw new NoSuchMethodException("Cannot find field " + fieldName + " on " + instance.getClass() + " or superclasses");
            f.setAccessible(true);
            f.set(instance, value);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
public class DatabaseUtil {

    /**
     * Find object by matching their labels.
     * Scans every DBID in the database's label relation and collects those
     * whose label matches (via {@code find}, i.e. substring match) the pattern.
     *
     * @param database Database to search in
     * @param name_pattern Name to match against class or object label; a null
     *        pattern yields an empty result
     * @return found cluster or it throws an exception.
     */
    public static ArrayModifiableDBIDs getObjectsByLabelMatch(Database database, Pattern name_pattern) {
        // may throw if no label representation can be guessed
        Relation<String> relation = guessLabelRepresentation(database);
        if (name_pattern == null) {
            return DBIDUtil.newArray();
        }
        ArrayModifiableDBIDs ret = DBIDUtil.newArray();
        for (DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
            if (name_pattern.matcher(relation.get(iditer)).find()) {
                ret.add(iditer);
            }
        }
        return ret;
    }
}
public class Scope { /** * Return scope handler or parent ' s scope handler if this scope doesn ' t have one . * @ return Scope handler ( or parent ' s one ) */ public IScopeHandler getHandler ( ) { } }
log . trace ( "getHandler from {}" , name ) ; if ( handler != null ) { return handler ; } else if ( hasParent ( ) ) { return getParent ( ) . getHandler ( ) ; } else { return null ; }
public class OperaDesktopDriver {

    /**
     * Waits until the menu item is pressed and then returns the text of the menu item pressed.
     * Fails fast if Opera is not connected; otherwise blocks up to the
     * configured menu-event timeout.
     *
     * @param menuItemText - window to wait for shown event on
     * @return text of the menu item
     * @throws CommunicationException when there is no active connection to Opera
     */
    public String waitForMenuItemPressed(String menuItemText) {
        if (getScopeServices().getConnection() == null) {
            throw new CommunicationException("waiting for a menu item to be pressed failed because Opera is not connected.");
        }
        return getScopeServices().waitForMenuItemPressed(menuItemText, OperaIntervals.MENU_EVENT_TIMEOUT.getMs());
    }
}
public class JFAPCommunicator {

    /**
     * Works out the capabilities that will be sent to the peer as part of the initial handshake.
     * Starts from the default capability bit-set and toggles individual bits
     * according to runtime property overrides from the SIB properties file.
     *
     * @return Returns the capabilities.
     */
    private short getClientCapabilities() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "getClientCapabilities");
        short capabilities = CommsConstants.CAPABILITIES_DEFAULT;
        // Allow the use of a runtime property to alter the capability that we require a non-java
        // bootstrap to locate an ME
        boolean nonJavaBootstrap = CommsUtils.getRuntimeBooleanProperty(CommsConstants.CAPABILITIY_REQUIRES_NONJAVA_BOOTSTRAP_KEY, CommsConstants.CAPABILITIY_REQUIRES_NONJAVA_BOOTSTRAP_DEF);
        if (nonJavaBootstrap) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "Requesting non-java bootstrap");
            // This bit is off by default, so turn it on
            capabilities |= CommsConstants.CAPABILITIY_REQUIRES_NONJAVA_BOOTSTRAP;
        }
        // Allow the use of a runtime property to alter the capability that we require JMF messages
        boolean jmfMessagesOnly = CommsUtils.getRuntimeBooleanProperty(CommsConstants.CAPABILITIY_REQUIRES_JMF_ENCODING_KEY, CommsConstants.CAPABILITIY_REQUIRES_JMF_ENCODING_DEF);
        if (jmfMessagesOnly) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "Requesting JMF Only");
            // This bit is off by default, so turn it on
            capabilities |= CommsConstants.CAPABILITIY_REQUIRES_JMF_ENCODING;
        }
        // Allow the use of a runtime property to alter the capability that we require JMS messages
        boolean jmsMessagesOnly = CommsUtils.getRuntimeBooleanProperty(CommsConstants.CAPABILITIY_REQUIRES_JMS_MESSAGES_KEY, CommsConstants.CAPABILITIY_REQUIRES_JMS_MESSAGES_DEF);
        if (jmsMessagesOnly) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "Requesting JMS Only");
            // This bit is off by default, so turn it on
            capabilities |= CommsConstants.CAPABILITIY_REQUIRES_JMS_MESSAGES;
        }
        // Allow the use of a runtime property to turn off optimized transactions.
        boolean disableOptimizedTx = CommsUtils.getRuntimeBooleanProperty(CommsConstants.DISABLE_OPTIMIZED_TX_KEY, CommsConstants.DISABLE_OPTIMIZED_TX);
        if (disableOptimizedTx) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "Disabling use of optimized transactions");
            // This bit is set on by default, so we must turn it off
            // (XOR against 0xFFFF builds the complement mask within short range)
            capabilities &= (0xFFFF ^ CommsConstants.CAPABILITIY_REQUIRES_OPTIMIZED_TX);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "getClientCapabilities", capabilities);
        return capabilities;
    }
}
public class RawLocalFileSystem { /** * Use the command chmod to set permission . */ @ Override public void setPermission ( Path p , FsPermission permission ) throws IOException { } }
FsAction user = permission . getUserAction ( ) ; FsAction group = permission . getGroupAction ( ) ; FsAction other = permission . getOtherAction ( ) ; File f = pathToFile ( p ) ; // Fork chmod if group and other permissions are different . . . if ( group != other ) { execSetPermission ( f , permission ) ; return ; } boolean rv = true ; // read perms rv = f . setReadable ( group . implies ( FsAction . READ ) , false ) ; checkReturnValue ( rv , p , permission ) ; if ( group . implies ( FsAction . READ ) != user . implies ( FsAction . READ ) ) { f . setReadable ( user . implies ( FsAction . READ ) , true ) ; checkReturnValue ( rv , p , permission ) ; } // write perms rv = f . setWritable ( group . implies ( FsAction . WRITE ) , false ) ; checkReturnValue ( rv , p , permission ) ; if ( group . implies ( FsAction . WRITE ) != user . implies ( FsAction . WRITE ) ) { f . setWritable ( user . implies ( FsAction . WRITE ) , true ) ; checkReturnValue ( rv , p , permission ) ; } // exec perms rv = f . setExecutable ( group . implies ( FsAction . EXECUTE ) , false ) ; checkReturnValue ( rv , p , permission ) ; if ( group . implies ( FsAction . EXECUTE ) != user . implies ( FsAction . EXECUTE ) ) { f . setExecutable ( user . implies ( FsAction . EXECUTE ) , true ) ; checkReturnValue ( rv , p , permission ) ; }
public class TypedCollections {

    /**
     * Dynamically check that the members of the list are all instances of
     * the given type (or null).
     * Thin wrapper delegating to {@code dynamicallyCastCollection} with the
     * List interface as the target collection type.
     *
     * @param <E> the list's element type
     * @param list the list to cast
     * @param type the class of the list's element type.
     * @return the dynamically-type checked list.
     * @throws java.lang.ClassCastException
     */
    @SuppressWarnings("unchecked")
    static <E> List<E> dynamicallyCastList(List<?> list, Class<E> type) {
        return dynamicallyCastCollection(list, type, List.class);
    }
}
public class AbstractJSSEProvider {

    /**
     * this provides the SSL context instance.
     * Resolves the configured key store and trust store (software keystore or
     * PKCS#11 hardware crypto token), initializes the corresponding
     * KeyManagerFactory / TrustManagerFactory, and appends WebSphere wrapper
     * managers (WSX509KeyManager / WSX509TrustManager) to the supplied holders.
     *
     * @param connectionInfo connection metadata; supplies the inbound/outbound direction
     * @param sslConfig the SSL configuration properties to honor
     * @param kmHolder out-parameter receiving the created key manager (if any)
     * @param tmHolder out-parameter receiving the created trust manager
     * @throws Exception on any store/factory initialization failure (also routed to FFDC)
     */
    private void getKeyTrustManagers(Map<String, Object> connectionInfo, SSLConfig sslConfig,
            List<KeyManager> kmHolder, List<TrustManager> tmHolder) throws Exception {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            Tr.entry(tc, "getKeyTrustManagers", new Object[] { connectionInfo, sslConfig });
        TrustManagerFactory trustManagerFactory = null;
        KeyManagerFactory keyManagerFactory = null;
        KeyStore keyStore = null;
        KeyStore trustStore = null;
        boolean createKeyMgr = true;
        String direction = Constants.DIRECTION_UNKNOWN;
        if (connectionInfo != null) {
            direction = (String) connectionInfo.get(Constants.CONNECTION_INFO_DIRECTION);
        }
        try {
            // Access a potentially set contextProvider.
            String trustFileName = getSSLContextProperty(Constants.SSLPROP_TRUST_STORE_NAME, sslConfig);
            WSKeyStore wsts = null;
            if (trustFileName != null)
                wsts = KeyStoreManager.getInstance().getKeyStore(trustFileName);
            if (wsts != null) {
                trustStore = wsts.getKeyStore(false, false);
            }
            String keyFileName = getSSLContextProperty(Constants.SSLPROP_KEY_STORE_NAME, sslConfig);
            WSKeyStore wsks = null;
            if (keyFileName != null)
                wsks = KeyStoreManager.getInstance().getKeyStore(keyFileName);
            if (wsks != null) {
                keyStore = wsks.getKeyStore(false, false);
            }
            boolean usingHwCryptoTrustStore = false;
            boolean usingHwCryptoKeyStore = false;
            String ctxtProvider = getSSLContextProperty(Constants.SSLPROP_CONTEXT_PROVIDER, sslConfig);
            String keyMgr = getSSLContextProperty(Constants.SSLPROP_KEY_MANAGER, sslConfig);
            String trustMgr = getSSLContextProperty(Constants.SSLPROP_TRUST_MANAGER, sslConfig);
            String clientAuthentication = getSSLContextProperty(Constants.SSLPROP_CLIENT_AUTHENTICATION, sslConfig);
            String clientAliasName = getSSLContextProperty(Constants.SSLPROP_KEY_STORE_CLIENT_ALIAS, sslConfig);
            String serverAliasName = getSSLContextProperty(Constants.SSLPROP_KEY_STORE_SERVER_ALIAS, sslConfig);
            String tokenLibraryFile = getSSLContextProperty(Constants.SSLPROP_TOKEN_LIBRARY, sslConfig);
            String tokenPassword = getSSLContextProperty(Constants.SSLPROP_TOKEN_PASSWORD, sslConfig);
            String tokenType = getSSLContextProperty(Constants.SSLPROP_TOKEN_TYPE, sslConfig);
            String tokenSlot = getSSLContextProperty(Constants.SSLPROP_TOKEN_SLOT, sslConfig);
            char[] passPhrase = null;
            // Handle Trust Store
            if (trustStore != null) {
                // Trust store specified.
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Using trust store: " + wsts.getLocation());
                }
            } else {
                // No trust store specified. Check if hw crypto is involved.
                if (tokenLibraryFile != null) {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(tc, "No trust store specified, but found hardware crypto");
                    }
                    WSPKCSInKeyStore pKS = pkcsStoreList.insert(tokenType, tokenLibraryFile, tokenPassword, false, ctxtProvider);
                    if (pKS != null) {
                        trustStore = pKS.getTS();
                        trustManagerFactory = pKS.getTMF();
                        usingHwCryptoTrustStore = true;
                    }
                } else {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(tc, "No trust store specified and no hardware crypto defined");
                    }
                    // No hw crypto.
                    if (direction.equals(Constants.DIRECTION_INBOUND) && (clientAuthentication.equals(Constants.FALSE))) {
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                            Tr.debug(tc, "trust store permitted to be null since this is inbound and client auth is false");
                        }
                    } else {
                        throw new IllegalArgumentException("Invalid trust file name of null");
                    }
                }
            }
            if (!usingHwCryptoTrustStore) {
                // Get instance of trust manager factory. Use contextProvider if
                // available.
                // Already got trustManagerFactory for crypto
                trustManagerFactory = getTrustManagerFactoryInstance(trustMgr, ctxtProvider);
                String ldapCertstoreHost = System.getProperty(Constants.SSLPROP_LDAP_CERT_STORE_HOST);
                String ldapCertstorePortS = System.getProperty(Constants.SSLPROP_LDAP_CERT_STORE_PORT);
                int ldapCertstorePort = ldapCertstorePortS == null ? 389 : Integer.parseInt(ldapCertstorePortS);
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "certStoreHost: " + ldapCertstoreHost);
                    Tr.debug(tc, "certStorePort: " + ldapCertstorePort);
                    Tr.debug(tc, "trustManagerAlgorithm: " + trustManagerFactory.getAlgorithm());
                }
                if (ldapCertstoreHost != null && trustManagerFactory != null && (trustManagerFactory.getAlgorithm().equals("IbmPKIX"))) {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(tc, "Adding ldap cert store " + ldapCertstoreHost + ":" + ldapCertstorePort + " ");
                    }
                    PKIXBuilderParameters pkixParams = new PKIXBuilderParameters(trustStore, new X509CertSelector());
                    // create the ldap parms
                    LDAPCertStoreParameters LDAPParms = new LDAPCertStoreParameters(ldapCertstoreHost, ldapCertstorePort);
                    pkixParams.addCertStore(CertStore.getInstance("LDAP", LDAPParms));
                    // enable revocation checking
                    pkixParams.setRevocationEnabled(true);
                    // Wrap them as trust manager parameters
                    ManagerFactoryParameters trustParams = new CertPathTrustManagerParameters(pkixParams);
                    trustManagerFactory.init(trustParams);
                } else if (null != trustManagerFactory) {
                    trustManagerFactory.init(trustStore);
                }
            }
            // Handle Key Store
            if (keyStore != null) {
                // Key store specified.
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Using software keystore: " + wsks.getLocation());
                }
            } else {
                // No key store specified. Check if hw crypto is involved.
                if (tokenLibraryFile != null) {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(tc, "No key store specified, but found hardware crypto");
                    }
                    // Hw crypto is involved. Build the trust store in a hw unique way.
                    // First check to see if the same keystore is used by the trust
                    // manager.
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(tc, "Reusing key store from Trust Manager");
                    }
                    WSPKCSInKeyStore pKS = pkcsStoreList.insert(tokenType, tokenLibraryFile, tokenPassword, true, ctxtProvider);
                    if (pKS != null) {
                        keyStore = pKS.getKS();
                        keyManagerFactory = pKS.getKMF();
                        usingHwCryptoKeyStore = true;
                    }
                } else {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(tc, "No key store specified and no hardware crypto defined");
                    }
                    throw new IllegalArgumentException("No key store specified and no hardware crypto defined");
                }
            }
            if (!usingHwCryptoKeyStore) {
                // Get an instance of the key manager factory.
                keyManagerFactory = getKeyManagerFactoryInstance(keyMgr, ctxtProvider);
                String kspass = wsks.getPassword();
                if (!kspass.isEmpty()) {
                    try {
                        SerializableProtectedString keypass = wsks.getKeyPassword();
                        String decodedPass = WSKeyStore.decodePassword(new String(keypass.getChars()));
                        synchronized (_lockObj) {
                            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                                Tr.debug(tc, "Entering synchronized block around key manager factory init.");
                            }
                            keyManagerFactory.init(keyStore, decodedPass.toCharArray());
                            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                                Tr.debug(tc, "Exiting synchronized block around key manager factory init.");
                            }
                        }
                    } catch (UnrecoverableKeyException exc) {
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                            Tr.debug(tc, "Error initializing key manager, the password can not be used to recover all keys");
                        }
                        Tr.error(tc, "ssl.unrecoverablekey.error.CWPKI0813E", new Object[] { wsks.getLocation(), exc.getMessage() });
                        throw new UnrecoverableKeyException(exc.getMessage() + ": invalid password for key in file '" + wsks.getLocation() + "'");
                    }
                } else {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(tc, "No password provide so do not create a keymanager");
                    }
                    createKeyMgr = false;
                }
            }
            if (createKeyMgr) {
                // Initialize the SSL context with the key and trust manager factories.
                WSX509KeyManager wsKeyManager = new WSX509KeyManager(keyStore, passPhrase, keyManagerFactory, sslConfig, null);
                if (serverAliasName != null && serverAliasName.length() > 0)
                    wsKeyManager.setServerAlias(serverAliasName);
                if (clientAliasName != null && clientAliasName.length() > 0)
                    wsKeyManager.setClientAlias(clientAliasName);
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    Tr.debug(tc, "Initializing WSX509KeyManager.", new Object[] { serverAliasName, clientAliasName, tokenSlot });
                kmHolder.add(wsKeyManager);
            }
            // prepare trust manager wrapper.
            TrustManager[] defaultTMArray = trustManagerFactory.getTrustManagers();
            WSX509TrustManager wsTrustManager = new WSX509TrustManager(defaultTMArray, connectionInfo, sslConfig, trustFileName, wsts.getLocation());
            tmHolder.add(wsTrustManager);
        } catch (Exception e) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                Tr.debug(tc, "Exception caught during init, " + e);
            FFDCFilter.processException(e, getClass().getName(), "getKeyTrustManagers", this);
            throw e;
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            Tr.exit(tc, "getKeyTrustManagers");
    }
}
public class Part {

    /**
     * Create a file multi-part field, from byte array data.
     * This returns a part equivalent to an &lt;input type="file"/&gt; field in
     * a multi-part form; the body is written verbatim with a binary content type.
     */
    public static Part<byte[]> file(String name, String fileName, byte[] bytes) {
        // writer lambda: dump the raw bytes straight to the output stream (charset unused)
        return new Part<>(name, fileName, bytes, CONTENT_TYPE_BINARY, null, (body, out, charset) -> out.write(body));
    }
}
public class CleverTapAPI { /** * For Google Play Store / Android Studio analytics */ private static boolean checkForExoPlayer ( ) { } }
boolean exoPlayerPresent = false ; Class className = null ; try { className = Class . forName ( "com.google.android.exoplayer2.ExoPlayerFactory" ) ; className = Class . forName ( "com.google.android.exoplayer2.source.hls.HlsMediaSource" ) ; className = Class . forName ( "com.google.android.exoplayer2.ui.PlayerView" ) ; Logger . d ( "ExoPlayer is present" ) ; exoPlayerPresent = true ; } catch ( Throwable t ) { Logger . d ( "ExoPlayer library files are missing!!!" ) ; Logger . d ( "Please add ExoPlayer dependencies to render InApp or Inbox messages playing video. For more information checkout CleverTap documentation." ) ; if ( className != null ) Logger . d ( "ExoPlayer classes not found " + className . getName ( ) ) ; else Logger . d ( "ExoPlayer classes not found" ) ; } return exoPlayerPresent ;
public class AmazonEC2Client {

    /**
     * Launches the specified Scheduled Instances.
     * Before you can launch a Scheduled Instance, you must purchase it and obtain an identifier
     * using PurchaseScheduledInstances. You must launch a Scheduled Instance during its scheduled
     * time period. You can't stop or reboot a Scheduled Instance, but you can terminate it as
     * needed; after terminating early you can launch it again after a few minutes.
     *
     * @param request Contains the parameters for RunScheduledInstances.
     * @return Result of the RunScheduledInstances operation returned by the service.
     * @sample AmazonEC2.RunScheduledInstances
     */
    @Override
    public RunScheduledInstancesResult runScheduledInstances(RunScheduledInstancesRequest request) {
        // standard SDK pattern: apply request handlers, then dispatch
        request = beforeClientExecution(request);
        return executeRunScheduledInstances(request);
    }
}
public class LocalTimeCLA { /** * { @ inheritDoc } */ @ Override public LocalTime [ ] getValueAsLocalTimeArray ( ) throws ParseException { } }
final LocalTime [ ] result = new LocalTime [ size ( ) ] ; for ( int r = 0 ; r < size ( ) ; r ++ ) result [ r ] = getValue ( r ) ; return result ;
public class DirectoryServiceClient {

    /**
     * Close the DirectoryServiceClient.
     * Best-effort shutdown: the connection close failure is only logged, and
     * both the connection and the watcher manager references are cleared so a
     * second call is a no-op.
     */
    public void close() {
        if (connection != null) {
            try {
                connection.close();
            } catch (IOException e) {
                // best effort: log and continue tearing down
                LOGGER.warn("Close the DirectoryConnection get exception - " + e.getMessage());
            }
            connection = null;
        }
        if (watcherManager != null) {
            watcherManager.cleanup();
            watcherManager = null;
        }
    }
}
public class PojoSerializerSnapshotData {

    /**
     * Creates a {@link PojoSerializerSnapshotData} from configuration of a {@link PojoSerializer}.
     *
     * <p>This factory method is meant to be used in regular write paths, i.e. when taking a snapshot
     * of the {@link PojoSerializer}. All registered subclass classes, and non-registered
     * subclass classes are all present. Some POJO fields may be absent, if the originating
     * {@link PojoSerializer} was a restored one with already missing fields, and was never replaced
     * by a new {@link PojoSerializer} (i.e. because the serialized old data was never accessed).
     */
    static <T> PojoSerializerSnapshotData<T> createFrom(
            Class<T> pojoClass,
            Field[] fields,
            TypeSerializer<?>[] fieldSerializers,
            LinkedHashMap<Class<?>, TypeSerializer<?>> registeredSubclassSerializers,
            Map<Class<?>, TypeSerializer<?>> nonRegisteredSubclassSerializers) {
        final LinkedOptionalMap<Field, TypeSerializerSnapshot<?>> fieldSerializerSnapshots = new LinkedOptionalMap<>(fields.length);
        for (int i = 0; i < fields.length; i++) {
            Field field = fields[i];
            // a null field means it went missing on restore; key by a placeholder name
            String fieldName = (field == null) ? getDummyNameForMissingField(i) : field.getName();
            fieldSerializerSnapshots.put(fieldName, field, TypeSerializerUtils.snapshotBackwardsCompatible(fieldSerializers[i]));
        }
        // registered subclasses: LinkedHashMap preserves registration order
        LinkedHashMap<Class<?>, TypeSerializerSnapshot<?>> registeredSubclassSerializerSnapshots = new LinkedHashMap<>(registeredSubclassSerializers.size());
        registeredSubclassSerializers.forEach((k, v) -> registeredSubclassSerializerSnapshots.put(k, TypeSerializerUtils.snapshotBackwardsCompatible(v)));
        // non-registered subclasses: order is irrelevant, plain HashMap suffices
        Map<Class<?>, TypeSerializerSnapshot<?>> nonRegisteredSubclassSerializerSnapshots = new HashMap<>(nonRegisteredSubclassSerializers.size());
        nonRegisteredSubclassSerializers.forEach((k, v) -> nonRegisteredSubclassSerializerSnapshots.put(k, TypeSerializerUtils.snapshotBackwardsCompatible(v)));
        return new PojoSerializerSnapshotData<>(
                pojoClass,
                fieldSerializerSnapshots,
                optionalMapOf(registeredSubclassSerializerSnapshots, Class::getName),
                optionalMapOf(nonRegisteredSubclassSerializerSnapshots, Class::getName));
    }
}
public class ContactsInterface {

    /**
     * Get the collection of contacts for the calling user.
     * Issues the flickr.contacts.getList call and maps the XML payload
     * (paging attributes plus one element per contact) onto Contact objects.
     *
     * @return The Collection of Contact objects
     * @throws FlickrException when the API reports an error response
     */
    public Collection<Contact> getList() throws FlickrException {
        ContactList<Contact> contacts = new ContactList<Contact>();
        Map<String, Object> parameters = new HashMap<String, Object>();
        parameters.put("method", METHOD_GET_LIST);
        Response response = transportAPI.get(transportAPI.getPath(), parameters, apiKey, sharedSecret);
        if (response.isError()) {
            throw new FlickrException(response.getErrorCode(), response.getErrorMessage());
        }
        Element contactsElement = response.getPayload();
        // paging metadata from the root element
        contacts.setPage(contactsElement.getAttribute("page"));
        contacts.setPages(contactsElement.getAttribute("pages"));
        contacts.setPerPage(contactsElement.getAttribute("perpage"));
        contacts.setTotal(contactsElement.getAttribute("total"));
        NodeList contactNodes = contactsElement.getElementsByTagName("contact");
        for (int i = 0; i < contactNodes.getLength(); i++) {
            Element contactElement = (Element) contactNodes.item(i);
            Contact contact = new Contact();
            contact.setId(contactElement.getAttribute("nsid"));
            contact.setUsername(contactElement.getAttribute("username"));
            contact.setRealName(contactElement.getAttribute("realname"));
            // flag attributes come back as "1"/"0"
            contact.setFriend("1".equals(contactElement.getAttribute("friend")));
            contact.setFamily("1".equals(contactElement.getAttribute("family")));
            contact.setIgnored("1".equals(contactElement.getAttribute("ignored")));
            // normalize missing/empty path alias to null
            String lPathAlias = contactElement.getAttribute("path_alias");
            contact.setPathAlias(lPathAlias == null || "".equals(lPathAlias) ? null : lPathAlias);
            contact.setOnline(OnlineStatus.fromType(contactElement.getAttribute("online")));
            contact.setIconFarm(contactElement.getAttribute("iconfarm"));
            contact.setIconServer(contactElement.getAttribute("iconserver"));
            if (contact.getOnline() == OnlineStatus.AWAY) {
                // the away message is the element's text content
                contactElement.normalize();
                contact.setAwayMessage(XMLUtilities.getValue(contactElement));
            }
            contacts.add(contact);
        }
        return contacts;
    }
}
public class Classes { /** * Get named field of requested class class or its super - classes package hierarchy , with checked exception . Tries to * get requested field from given class ; if not found try with super - classes hierarchy but limited to requested class * package . If field still not found throw { @ link NoSuchFieldException } . * Implementation note : if field not found on requested class this method is executed recursively as long as * superclass is in the same package as requested base class . Is not possible to retrieve inherited fields if * superclass descendant is in different package . * @ param clazz class to search for named field , * @ param fieldName the name of field to retrieve . * @ return requested field . * @ throws NoSuchFieldException if class or super - class package hierarchy has no field with requested name . */ public static Field getFieldEx ( Class < ? > clazz , String fieldName ) throws NoSuchFieldException { } }
try { Field field = clazz . getDeclaredField ( fieldName ) ; field . setAccessible ( true ) ; return field ; } catch ( NoSuchFieldException e ) { Class < ? > superclass = clazz . getSuperclass ( ) ; if ( superclass != null && clazz . getPackage ( ) . equals ( superclass . getPackage ( ) ) ) { return getFieldEx ( superclass , fieldName ) ; } throw e ; } catch ( SecurityException e ) { throw new BugError ( e ) ; }
public class CmsSchedulerThreadPool { /** * Terminate all threads in this thread group . < p > * @ param waitForJobsToComplete if true , , all current jobs will be allowed to complete */ public void shutdown ( boolean waitForJobsToComplete ) { } }
m_isShutdown = true ; // signal each scheduler thread to shut down for ( int i = 0 ; i < m_currentThreadCount ; i ++ ) { if ( m_workers [ i ] != null ) { m_workers [ i ] . shutdown ( ) ; } } // give waiting ( wait ( 1000 ) ) worker threads a chance to shut down // active worker threads will shut down after finishing their // current job synchronized ( m_nextRunnableLock ) { m_nextRunnableLock . notifyAll ( ) ; } if ( waitForJobsToComplete ) { // wait until all worker threads are shut down int alive = m_currentThreadCount ; while ( alive > 0 ) { alive = 0 ; for ( int i = 0 ; i < m_currentThreadCount ; i ++ ) { if ( m_workers [ i ] . isAlive ( ) ) { try { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_THREAD_POOL_WAITING_1 , new Integer ( i ) ) ) ; } // note : with waiting infinite - join ( 0 ) - the application // may appear to ' hang ' // waiting for a finite time however requires an additional loop ( alive ) alive ++ ; m_workers [ i ] . join ( 200 ) ; } catch ( InterruptedException e ) { // can be ignored } } } } int activeCount = m_threadGroup . activeCount ( ) ; if ( ( activeCount > 0 ) && LOG . isInfoEnabled ( ) ) { LOG . info ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_THREAD_POOL_STILL_ACTIVE_1 , new Integer ( activeCount ) ) ) ; } if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_THREAD_POOL_SHUTDOWN_0 ) ) ; } }
public class XMLUnit { /** * Compare two XML documents provided as strings * @ param control Control document * @ param test Document to test * @ return Diff object describing differences in documents * @ throws SAXException * @ throws IOException */ public static Diff compareXML ( String control , String test ) throws SAXException , IOException { } }
return new Diff ( control , test ) ;
public class ApplicationGatewaysInner { /** * Lists all SSL predefined policies for configuring Ssl policy . * ServiceResponse < PageImpl < ApplicationGatewaySslPredefinedPolicyInner > > * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the PagedList & lt ; ApplicationGatewaySslPredefinedPolicyInner & gt ; object wrapped in { @ link ServiceResponse } if successful . */ public Observable < ServiceResponse < Page < ApplicationGatewaySslPredefinedPolicyInner > > > listAvailableSslPredefinedPoliciesNextSinglePageAsync ( final String nextPageLink ) { } }
if ( nextPageLink == null ) { throw new IllegalArgumentException ( "Parameter nextPageLink is required and cannot be null." ) ; } String nextUrl = String . format ( "%s" , nextPageLink ) ; return service . listAvailableSslPredefinedPoliciesNext ( nextUrl , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < Page < ApplicationGatewaySslPredefinedPolicyInner > > > > ( ) { @ Override public Observable < ServiceResponse < Page < ApplicationGatewaySslPredefinedPolicyInner > > > call ( Response < ResponseBody > response ) { try { ServiceResponse < PageImpl < ApplicationGatewaySslPredefinedPolicyInner > > result = listAvailableSslPredefinedPoliciesNextDelegate ( response ) ; return Observable . just ( new ServiceResponse < Page < ApplicationGatewaySslPredefinedPolicyInner > > ( result . body ( ) , result . response ( ) ) ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
public class ModelsImpl { /** * Get All Entity Roles for a given entity . * @ param appId The application ID . * @ param versionId The version ID . * @ param hEntityId The hierarchical entity extractor ID . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < List < EntityRole > > getHierarchicalEntityRolesAsync ( UUID appId , String versionId , UUID hEntityId , final ServiceCallback < List < EntityRole > > serviceCallback ) { } }
return ServiceFuture . fromResponse ( getHierarchicalEntityRolesWithServiceResponseAsync ( appId , versionId , hEntityId ) , serviceCallback ) ;
public class MountPointInfo { /** * < code > optional string ufsUri = 1 ; < / code > */ public java . lang . String getUfsUri ( ) { } }
java . lang . Object ref = ufsUri_ ; if ( ref instanceof java . lang . String ) { return ( java . lang . String ) ref ; } else { com . google . protobuf . ByteString bs = ( com . google . protobuf . ByteString ) ref ; java . lang . String s = bs . toStringUtf8 ( ) ; if ( bs . isValidUtf8 ( ) ) { ufsUri_ = s ; } return s ; }
public class DecimalFormat { /** * Sets the decimal format symbols used by this format . The format uses a copy of the * provided symbols . * @ param newSymbols desired DecimalFormatSymbols * @ see DecimalFormatSymbols */ public void setDecimalFormatSymbols ( DecimalFormatSymbols newSymbols ) { } }
symbols = ( DecimalFormatSymbols ) newSymbols . clone ( ) ; setCurrencyForSymbols ( ) ; expandAffixes ( null ) ;
public class ServiceRegistryLoader { /** * / * ( non - Javadoc ) * @ see org . jboss . arquillian . core . spi . ServiceLoader # onlyOne ( java . lang . Class , java . lang . Class ) */ @ Override public < T > T onlyOne ( Class < T > serviceClass , Class < ? extends T > defaultServiceClass ) { } }
T one = null ; try { one = onlyOne ( serviceClass ) ; } catch ( Exception e ) { } if ( one == null ) { one = createServiceInstance ( defaultServiceClass ) ; } return one ;
public class CommerceShipmentLocalServiceWrapper { /** * Creates a new commerce shipment with the primary key . Does not add the commerce shipment to the database . * @ param commerceShipmentId the primary key for the new commerce shipment * @ return the new commerce shipment */ @ Override public com . liferay . commerce . model . CommerceShipment createCommerceShipment ( long commerceShipmentId ) { } }
return _commerceShipmentLocalService . createCommerceShipment ( commerceShipmentId ) ;
public class KeepAliveManager { /** * Transport has active streams . Start sending keepalives if necessary . */ public synchronized void onTransportActive ( ) { } }
if ( state == State . IDLE ) { // When the transport goes active , we do not reset the nextKeepaliveTime . This allows us to // quickly check whether the connection is still working . state = State . PING_SCHEDULED ; if ( pingFuture == null ) { pingFuture = scheduler . schedule ( sendPing , keepAliveTimeInNanos - stopwatch . elapsed ( TimeUnit . NANOSECONDS ) , TimeUnit . NANOSECONDS ) ; } } else if ( state == State . IDLE_AND_PING_SENT ) { state = State . PING_SENT ; } // Other states are possible when keepAliveDuringTransportIdle = = true
public class SgUtils { /** * $ { env . MY _ ENV _ VAR : - default } */ private static String resolveEnvVar ( String envVarName , String mode , boolean bc ) { } }
final String envVarValue = System . getenv ( envVarName ) ; if ( envVarValue == null || envVarValue . isEmpty ( ) ) { if ( mode != null && mode . startsWith ( ":-" ) && mode . length ( ) > 2 ) { return bc ? Hasher . hash ( mode . substring ( 2 ) . toCharArray ( ) ) : mode . substring ( 2 ) ; } else { return null ; } } else { return bc ? Hasher . hash ( envVarValue . toCharArray ( ) ) : envVarValue ; }
public class CachedObject { /** * Add the given object as user of this cached data . */ public synchronized void use ( Object user ) { } }
usage ++ ; lastUsage = System . currentTimeMillis ( ) ; if ( users != null ) users . add ( user ) ;
public class JShell { /** * Get the current value of a variable . * @ param snippet the variable Snippet whose value is queried . * @ return the current value of the variable referenced by snippet . * @ throws IllegalStateException if this { @ code JShell } instance is closed . * @ throws IllegalArgumentException if the snippet is not associated with * this { @ code JShell } instance . * @ throws IllegalArgumentException if the variable ' s status is anything but * { @ link jdk . jshell . Snippet . Status # VALID } . */ public String varValue ( VarSnippet snippet ) throws IllegalStateException { } }
checkIfAlive ( ) ; checkValidSnippet ( snippet ) ; if ( snippet . status ( ) != Status . VALID ) { throw new IllegalArgumentException ( messageFormat ( "jshell.exc.var.not.valid" , snippet , snippet . status ( ) ) ) ; } String value ; try { value = executionControl ( ) . varValue ( snippet . classFullName ( ) , snippet . name ( ) ) ; } catch ( EngineTerminationException ex ) { throw new IllegalStateException ( ex . getMessage ( ) ) ; } catch ( ExecutionControlException ex ) { debug ( ex , "In varValue()" ) ; return "[" + ex . getMessage ( ) + "]" ; } return expunge ( value ) ;
public class ScalableStatistics { /** * Serializes the data structure into a byte array */ public byte [ ] asBytes ( ) { } }
byte [ ] className = tdigest . getClass ( ) . getName ( ) . getBytes ( Charsets . UTF_8 ) ; int vlen = exactValues == null ? 0 : numExactValues ; ByteBuffer buf = ByteBuffer . allocate ( 4 + 8 * 5 + 4 + 4 + 8 * vlen + 4 + className . length + tdigest . byteSize ( ) + 4 ) ; buf . putInt ( MAGIC_CODE ) ; // for sanity checks // write basic descriptive stats buf . putLong ( count ) ; buf . putDouble ( min ) ; buf . putDouble ( max ) ; buf . putDouble ( sum ) ; buf . putDouble ( sumOfSquares ) ; // write exact values , if any buf . putInt ( exactQuantilesThreshold ) ; buf . putInt ( exactValues == null ? - 1 : numExactValues ) ; if ( exactValues != null ) { for ( int i = 0 ; i < numExactValues ; i ++ ) { buf . putDouble ( exactValues [ i ] ) ; } } // write tdigest buf . putInt ( className . length ) ; buf . put ( className ) ; tdigest . asBytes ( buf ) ; buf . putInt ( MAGIC_CODE ) ; // for sanity checks Preconditions . checkArgument ( buf . position ( ) == buf . capacity ( ) ) ; return buf . array ( ) ;
public class DeferredService { /** * Unregister information provided after class loader was created */ public void deregisterDeferredService ( ) { } }
Object obj = serviceReg . get ( ) ; if ( obj == null ) { // already deregistered so there is nothing to be done return ; } if ( obj instanceof CountDownLatch ) { // If someone else has the latch , then let them do whatever they are doing and we pretend // we ' ve already done the deregister . return ; } else if ( obj instanceof ServiceRegistration < ? > ) { CountDownLatch latch = new CountDownLatch ( 1 ) ; if ( serviceReg . compareAndSet ( obj , latch ) ) { // This thread won the right to deregister the service try { ( ( ServiceRegistration < ? > ) obj ) . unregister ( ) ; // successfully deregistered - nothing more to do return ; } finally { // if the serviceReg was not updated for any reason , we need to restore the previous value serviceReg . compareAndSet ( latch , obj ) ; // in any case we need to allow any blocked threads to proceed latch . countDown ( ) ; } } }
public class ConnectorDescriptorImpl { /** * If not already created , a new < code > license < / code > element with the given value will be created . * Otherwise , the existing < code > license < / code > element will be returned . * @ return a new or existing instance of < code > LicenseType < ConnectorDescriptor > < / code > */ public LicenseType < ConnectorDescriptor > getOrCreateLicense ( ) { } }
Node node = model . getOrCreate ( "license" ) ; LicenseType < ConnectorDescriptor > license = new LicenseTypeImpl < ConnectorDescriptor > ( this , "license" , model , node ) ; return license ;
public class Association { /** * Returns for given parameter < i > _ id < / i > the instance of class * { @ link Association } . * @ param _ id id to search in the cache * @ return instance of class { @ link Association } * @ throws CacheReloadException on error */ public static Association get ( final long _id ) throws CacheReloadException { } }
final Cache < Long , Association > cache = InfinispanCache . get ( ) . < Long , Association > getCache ( Association . IDCACHE ) ; if ( ! cache . containsKey ( _id ) && ! Association . getAssociationFromDB ( Association . SQL_ID , _id ) ) { cache . put ( _id , Association . NULL , 100 , TimeUnit . SECONDS ) ; } final Association ret = cache . get ( _id ) ; return ret . equals ( Association . NULL ) ? null : ret ;
public class PaxExamRuntime { /** * Creates and starts a test container using options from a configuration class . * @ param configurationClassName * fully qualified class name of a configuration class . * @ return started test container * @ throws Exception when options cannot be parsed */ public static TestContainer createContainer ( String configurationClassName ) throws Exception { } }
Option [ ] options = getConfigurationOptions ( configurationClassName ) ; ExamSystem system = DefaultExamSystem . create ( options ) ; TestContainer testContainer = PaxExamRuntime . createContainer ( system ) ; testContainer . start ( ) ; return testContainer ;
public class NodeManager { /** * Resolve a host name . * @ param host The host . * @ param type The resource type . * @ return The resolved form . */ public RequestedNode resolve ( String host , ResourceType type ) { } }
RunnableIndices indices = typeToIndices . get ( type ) ; return indices . getOrCreateRequestedNode ( host ) ;
public class FastMath {

    /**
     * Compute least significant bit (Unit in Last Position) for a number.
     *
     * @param x number from which ulp is requested
     * @return ulp(x); +Infinity for infinite input, NaN for NaN input
     */
    public static float ulp(float x) {
        if (Float.isInfinite(x)) {
            return Float.POSITIVE_INFINITY;
        }
        // Flipping the lowest bit of the float representation yields the adjacent float;
        // its distance to x is exactly one ulp. Math.abs is used instead of the sibling
        // FastMath.abs: identical results, and the method becomes self-contained.
        return Math.abs(x - Float.intBitsToFloat(Float.floatToIntBits(x) ^ 1));
    }
}
public class FaultTolerantScheduler { /** * Schedules the specified task . * @ param task */ public void schedule ( TimerTask task , boolean checkIfAlreadyPresent ) { } }
final TimerTaskData taskData = task . getData ( ) ; final Serializable taskID = taskData . getTaskID ( ) ; task . setScheduler ( this ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( "Scheduling task with id " + taskID ) ; } // store the task and data final TimerTaskCacheData timerTaskCacheData = new TimerTaskCacheData ( taskID , baseFqn , cluster ) ; if ( timerTaskCacheData . create ( ) ) { timerTaskCacheData . setTaskData ( taskData ) ; } else if ( checkIfAlreadyPresent ) { throw new IllegalStateException ( "timer task " + taskID + " already scheduled" ) ; } // schedule task final SetTimerAfterTxCommitRunnable setTimerAction = new SetTimerAfterTxCommitRunnable ( task , this ) ; if ( txManager != null ) { try { Transaction tx = txManager . getTransaction ( ) ; if ( tx != null ) { TransactionContext txContext = TransactionContextThreadLocal . getTransactionContext ( ) ; if ( txContext == null ) { txContext = new TransactionContext ( ) ; tx . registerSynchronization ( new TransactionSynchronization ( txContext ) ) ; } txContext . put ( taskID , setTimerAction ) ; task . setSetTimerTransactionalAction ( setTimerAction ) ; } else { setTimerAction . run ( ) ; } } catch ( Throwable e ) { remove ( taskID , true ) ; throw new RuntimeException ( "Unable to register tx synchronization object" , e ) ; } } else { setTimerAction . run ( ) ; }
public class VMMService { /** * ( non - Javadoc ) * @ see com . ibm . websphere . security . wim . ProfileServiceLite # get ( com . ibm . wsspi . security . wim . model . Root ) */ @ Override public Root get ( Root root ) throws WIMException { } }
final String METHODNAME = "get" ; Root result = null ; if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , METHODNAME + " " + WIMTraceHelper . trace ( root ) ) ; } result = profileManager . get ( root ) ; if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , METHODNAME + " " + WIMTraceHelper . trace ( result ) ) ; } return result ;
public class H2O { /** * Submit to the correct priority queue */ public static < T extends H2OCountedCompleter > T submitTask ( T task ) { } }
int priority = task . priority ( ) ; if ( priority < LOW_PRIORITY_API_WORK ) LOW_PRIORITY_API_WORK_CLASS = task . getClass ( ) . toString ( ) ; assert MIN_PRIORITY <= priority && priority <= MAX_PRIORITY : "priority " + priority + " is out of range, expected range is < " + MIN_PRIORITY + "," + MAX_PRIORITY + ">" ; if ( FJPS [ priority ] == null ) synchronized ( H2O . class ) { if ( FJPS [ priority ] == null ) FJPS [ priority ] = new PrioritizedForkJoinPool ( priority , - 1 ) ; } FJPS [ priority ] . submit ( task ) ; return task ;
public class SqlContextImpl { /** * { @ inheritDoc } * @ see jp . co . future . uroborosql . fluent . SqlFluent # outParam ( java . lang . String , java . sql . SQLType ) */ @ Override public SqlContext outParam ( final String parameterName , final SQLType sqlType ) { } }
return param ( new OutParameter ( parameterName , sqlType ) ) ;
public class JDBCBlob { /** * Retrieves the byte position at which the specified byte array * < code > pattern < / code > begins within the < code > BLOB < / code > * value that this < code > Blob < / code > object represents . The * search for < code > pattern < / code > begins at position * < code > start < / code > . * @ param pattern the byte array for which to search * @ param start the position at which to begin searching ; the * first position is 1 * @ return the position at which the pattern appears , else - 1 * @ exception SQLException if there is an error accessing the * < code > BLOB < / code > or if start is less than 1 * @ exception SQLFeatureNotSupportedException if the JDBC driver does not support * this method * @ since JDK 1.2 , HSQLDB 1.7.2 */ public long position ( final byte [ ] pattern , long start ) throws SQLException { } }
final byte [ ] ldata = data ; checkValid ( ldata ) ; final int dlen = ldata . length ; if ( start < MIN_POS ) { throw Util . outOfRangeArgument ( "start: " + start ) ; } else if ( start > dlen || pattern == null ) { return - 1L ; } else { start -- ; } final int plen = pattern . length ; if ( plen == 0 || start > dlen - plen ) { return - 1L ; } final int stop = dlen - plen ; final byte b0 = pattern [ 0 ] ; outer_loop : for ( int i = ( int ) start ; i <= stop ; i ++ ) { if ( ldata [ i ] != b0 ) { continue ; } int len = plen ; int doffset = i ; int poffset = 0 ; boolean match = true ; while ( len -- > 0 ) { if ( ldata [ doffset ++ ] != pattern [ poffset ++ ] ) { continue outer_loop ; } } return ( i + 1 ) ; } return - 1L ;
public class ZDT1 {

    /**
     * Returns the value of the ZDT1 function H: h(f, g) = 1 - sqrt(f / g).
     *
     * @param f First argument of the function H.
     * @param g Second argument of the function H.
     */
    protected double evalH(double f, double g) {
        return 1.0 - Math.sqrt(f / g);
    }
}
public class NumberSelectorServiceImpl { /** * The main logic is : - Find a perfect match in DB using different formats . * - If not matched , use available Regexes in the organization . - If not * matched , try with the special * match . * @ param phone * @ param sourceOrganizationSid * @ param destinationOrganizationSid * @ param modifiers * @ return */ @ Override public NumberSelectionResult searchNumberWithResult ( String phone , Sid sourceOrganizationSid , Sid destinationOrganizationSid , Set < SearchModifier > modifiers ) { } }
if ( logger . isDebugEnabled ( ) ) { logger . debug ( "getMostOptimalIncomingPhoneNumber: " + phone + ",srcOrg:" + sourceOrganizationSid + ",destOrg:" + destinationOrganizationSid ) ; } List < String > numberQueries = createPhoneQuery ( phone ) ; NumberSelectionResult numberfound = findByNumber ( numberQueries , sourceOrganizationSid , destinationOrganizationSid , modifiers ) ; if ( numberfound . getNumber ( ) == null ) { // only use regex if perfect match didnt worked if ( destinationOrganizationSid != null && ( sourceOrganizationSid == null || destinationOrganizationSid . equals ( sourceOrganizationSid ) ) && phone . matches ( "[\\d,*,#,+]+" ) ) { // check regex if source and dest orgs are the same // only use regex if org available // check if there is a Regex match only if parameter is a String aka phone Number NumberSelectionResult regexFound = findByRegex ( numberQueries , sourceOrganizationSid , destinationOrganizationSid ) ; if ( regexFound . getNumber ( ) != null ) { numberfound = regexFound ; } if ( numberfound . getNumber ( ) == null ) { // if no regex match found , try with special star number in the end NumberSelectionResult starfound = findSingleNumber ( "*" , sourceOrganizationSid , destinationOrganizationSid , modifiers ) ; if ( starfound . getNumber ( ) != null ) { numberfound = new NumberSelectionResult ( starfound . getNumber ( ) , false , ResultType . REGEX ) ; } } } } if ( numberfound . getNumber ( ) == null ) { if ( logger . isDebugEnabled ( ) ) { StringBuffer stringBuffer = new StringBuffer ( ) ; stringBuffer . append ( "NumberSelectionService didn't match a number because: " ) ; if ( destinationOrganizationSid == null ) { stringBuffer . append ( " - Destination Org is null - " ) ; } else if ( sourceOrganizationSid != null && ! destinationOrganizationSid . equals ( sourceOrganizationSid ) ) { stringBuffer . append ( " - Source Org is NOT null and DOESN'T match the Destination Org - " ) ; } else if ( ! phone . 
matches ( "[\\d,*,#,+]+" ) ) { String msg = String . format ( " - Phone %s doesn't match regex \"[\\\\d,*,#,+]+\" - " , phone ) ; stringBuffer . append ( msg ) ; } else { String msg = String . format ( " - Phone %s didn't match any of the Regex - " , phone ) ; stringBuffer . append ( msg ) ; } logger . debug ( stringBuffer . toString ( ) ) ; } } return numberfound ;
public class AacSettingsMarshaller { /** * Marshall the given parameter object . */ public void marshall ( AacSettings aacSettings , ProtocolMarshaller protocolMarshaller ) { } }
if ( aacSettings == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( aacSettings . getBitrate ( ) , BITRATE_BINDING ) ; protocolMarshaller . marshall ( aacSettings . getCodingMode ( ) , CODINGMODE_BINDING ) ; protocolMarshaller . marshall ( aacSettings . getInputType ( ) , INPUTTYPE_BINDING ) ; protocolMarshaller . marshall ( aacSettings . getProfile ( ) , PROFILE_BINDING ) ; protocolMarshaller . marshall ( aacSettings . getRateControlMode ( ) , RATECONTROLMODE_BINDING ) ; protocolMarshaller . marshall ( aacSettings . getRawFormat ( ) , RAWFORMAT_BINDING ) ; protocolMarshaller . marshall ( aacSettings . getSampleRate ( ) , SAMPLERATE_BINDING ) ; protocolMarshaller . marshall ( aacSettings . getSpec ( ) , SPEC_BINDING ) ; protocolMarshaller . marshall ( aacSettings . getVbrQuality ( ) , VBRQUALITY_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class FileManagerImpl { /** * Main deallocation routine exposed in public interface . This routine * performs a disk read to read the size and then a disk write to mark * the size tag allocated . An optimization to avoid doing the read would * be to store the size of all allocated blocks in a hash table . * @ param block The address of the block to deallocate . * @ exception IOException * @ exception FileManagerException */ public void deallocate ( long block ) throws IOException { } }
int size ; if ( readOnly ) { throw ( new FileManagerException ( "Attempt to deallocate in read only mode" ) ) ; } block = block - HDR_SIZE ; if ( block < FIRST_TAIL ) { throw ( new FileManagerException ( "Illegal block address passed to deallocate" ) ) ; } size = get_block_size ( block ) ; if ( size >= 0 ) { throw ( new FileManagerException ( "Attempt to deallocate block with illegal size" ) ) ; } size = abs ( size ) ; deallocs ++ ; seek_and_count ( block ) ; writeInt ( size ) ; add_to_freelist ( block , size ) ; allocated_blocks -- ; allocated_words -= size ;
public class SDBaseOps { /** * Variance array reduction operation , optionally along specified dimensions < br > * Note that if keepDims = true , the output variable has the same rank as the input variable , * with the reduced dimensions having size 1 . This can be useful for later broadcast operations ( such as subtracting * the mean along a dimension ) . < br > * Example : if input has shape [ a , b , c ] and dimensions = [ 1 ] then output has shape : * keepDims = true : [ a , 1 , c ] < br > * keepDims = false : [ a , c ] * @ param name Output variable name * @ param x Input variable * @ param biasCorrected If true : divide by ( N - 1 ) ( i . e . , sample variable ) . If false : divide by N ( population variance ) * @ param keepDims If true : keep the dimensions that are reduced on ( as size 1 ) . False : remove the reduction dimensions * @ param dimensions Dimensions to reduce over . If dimensions are not specified , full array reduction is performed * @ return Output variable : reduced array of rank ( input rank - num dimensions ) */ public SDVariable variance ( String name , @ NonNull SDVariable x , boolean biasCorrected , boolean keepDims , int ... dimensions ) { } }
validateNumerical ( "variance" , x ) ; SDVariable result = f ( ) . variance ( x , biasCorrected , keepDims , dimensions ) ; return updateVariableNameAndReference ( result , name ) ;
public class DomConfigurationWriter { /** * Gets the XML element that represents the { @ link DatastoreCatalog } . * @ return */ public Element getDatastoreCatalogElement ( ) { } }
final Element configurationFileDocumentElement = getDocumentElement ( ) ; final Element datastoreCatalogElement = getOrCreateChildElementByTagName ( configurationFileDocumentElement , "datastore-catalog" ) ; if ( datastoreCatalogElement == null ) { throw new IllegalStateException ( "Could not find <datastore-catalog> element in configuration file" ) ; } return datastoreCatalogElement ;
public class CommonOps_DDF3 { /** * Returns the absolute value of the element in the matrix that has the smallest absolute value . < br > * < br > * Min { | a < sub > ij < / sub > | } for all i and j < br > * @ param a A matrix . Not modified . * @ return The max element value of the matrix . */ public static double elementMinAbs ( DMatrix3x3 a ) { } }
double min = Math . abs ( a . a11 ) ; double tmp = Math . abs ( a . a12 ) ; if ( tmp < min ) min = tmp ; tmp = Math . abs ( a . a13 ) ; if ( tmp < min ) min = tmp ; tmp = Math . abs ( a . a21 ) ; if ( tmp < min ) min = tmp ; tmp = Math . abs ( a . a22 ) ; if ( tmp < min ) min = tmp ; tmp = Math . abs ( a . a23 ) ; if ( tmp < min ) min = tmp ; tmp = Math . abs ( a . a31 ) ; if ( tmp < min ) min = tmp ; tmp = Math . abs ( a . a32 ) ; if ( tmp < min ) min = tmp ; tmp = Math . abs ( a . a33 ) ; if ( tmp < min ) min = tmp ; return min ;
public class Global { /** * Attributes to include in mapping . * @ param includedAttributes names of the attributes to include in the mapping * @ return this instance of JMapperGlobal */ public Global includedAttributes ( String ... includedAttributes ) { } }
for ( String attribute : includedAttributes ) global . attributes . add ( new LocalAttribute ( attribute ) . toXStream ( ) ) ; return this ;
public class ScreenInGridScreen { /** * SetupSFields Method . */ public void setupSFields ( ) { } }
this . getRecord ( ScreenIn . SCREEN_IN_FILE ) . getField ( ScreenIn . SCREEN_ITEM_NUMBER ) . setupDefaultView ( this . getNextLocation ( ScreenConstants . NEXT_LOGICAL , ScreenConstants . ANCHOR_DEFAULT ) , this , ScreenConstants . DEFAULT_DISPLAY ) ; this . getRecord ( ScreenIn . SCREEN_IN_FILE ) . getField ( ScreenIn . SCREEN_FIELD_NAME ) . setupDefaultView ( this . getNextLocation ( ScreenConstants . NEXT_LOGICAL , ScreenConstants . ANCHOR_DEFAULT ) , this , ScreenConstants . DEFAULT_DISPLAY ) ; this . getRecord ( ScreenIn . SCREEN_IN_FILE ) . getField ( ScreenIn . SCREEN_LOCATION ) . setupDefaultView ( this . getNextLocation ( ScreenConstants . NEXT_LOGICAL , ScreenConstants . ANCHOR_DEFAULT ) , this , ScreenConstants . DEFAULT_DISPLAY ) ; this . getRecord ( ScreenIn . SCREEN_IN_FILE ) . getField ( ScreenIn . SCREEN_ANCHOR ) . setupDefaultView ( this . getNextLocation ( ScreenConstants . NEXT_LOGICAL , ScreenConstants . ANCHOR_DEFAULT ) , this , ScreenConstants . DEFAULT_DISPLAY ) ;
public class AmazonSQSExtendedClient { /** * Deletes the specified message from the specified queue and deletes the * message payload from Amazon S3 when necessary . You specify the message by * using the message ' s < code > receipt handle < / code > and not the * < code > message ID < / code > you received when you sent the message . Even if * the message is locked by another reader due to the visibility timeout * setting , it is still deleted from the queue . If you leave a message in * the queue for longer than the queue ' s configured retention period , Amazon * SQS automatically deletes it . * < b > NOTE : < / b > The receipt handle is associated with a specific instance of * receiving the message . If you receive a message more than once , the * receipt handle you get each time you receive the message is different . * When you request DeleteMessage , if you don ' t provide the most recently * received receipt handle for the message , the request will still succeed , * but the message might not be deleted . * < b > IMPORTANT : < / b > It is possible you will receive a message even after * you have deleted it . This might happen on rare occasions if one of the * servers storing a copy of the message is unavailable when you request to * delete the message . The copy remains on the server and might be returned * to you again on a subsequent receive request . You should create your * system to be idempotent so that receiving a particular message more than * once is not a problem . * @ param deleteMessageRequest * Container for the necessary parameters to execute the * DeleteMessage service method on AmazonSQS . * @ return The response from the DeleteMessage service method , as returned * by AmazonSQS . * @ throws ReceiptHandleIsInvalidException * @ throws InvalidIdFormatException * @ throws AmazonClientException * If any internal errors are encountered inside the client * while attempting to make the request or handle the response . 
* For example if a network connection is not available . * @ throws AmazonServiceException * If an error response is returned by AmazonSQS indicating * either a problem with the data in the request , or a server * side issue . */ public DeleteMessageResult deleteMessage ( DeleteMessageRequest deleteMessageRequest ) { } }
if ( deleteMessageRequest == null ) { String errorMessage = "deleteMessageRequest cannot be null." ; LOG . error ( errorMessage ) ; throw new AmazonClientException ( errorMessage ) ; } deleteMessageRequest . getRequestClientOptions ( ) . appendUserAgent ( SQSExtendedClientConstants . USER_AGENT_HEADER ) ; if ( ! clientConfiguration . isLargePayloadSupportEnabled ( ) ) { return super . deleteMessage ( deleteMessageRequest ) ; } String receiptHandle = deleteMessageRequest . getReceiptHandle ( ) ; String origReceiptHandle = receiptHandle ; if ( isS3ReceiptHandle ( receiptHandle ) ) { deleteMessagePayloadFromS3 ( receiptHandle ) ; origReceiptHandle = getOrigReceiptHandle ( receiptHandle ) ; } deleteMessageRequest . setReceiptHandle ( origReceiptHandle ) ; return super . deleteMessage ( deleteMessageRequest ) ;
public class CommandLine { /** * Delegates to the implementation of { @ link # executeHelpRequest ( ParseResult ) } . * @ deprecated use { @ link # executeHelpRequest ( ParseResult ) } instead * @ param parsedCommands the list of { @ code CommandLine } objects to check if help was requested * @ param out the { @ code PrintStream } to print help to if requested * @ param err the error string to print diagnostic messages to , in addition to the output from the exception handler * @ param colorScheme for printing help messages using ANSI styles and colors * @ return { @ code true } if help was printed , { @ code false } otherwise * @ since 3.6 */ @ Deprecated public static boolean printHelpIfRequested ( List < CommandLine > parsedCommands , PrintStream out , PrintStream err , Help . ColorScheme colorScheme ) { } }
// for backwards compatibility for ( CommandLine cmd : parsedCommands ) { cmd . setOut ( new PrintWriter ( out , true ) ) . setErr ( new PrintWriter ( err , true ) ) . setColorScheme ( colorScheme ) ; } return executeHelpRequest ( parsedCommands ) != null ;
public class AppUtil { /** * Starts the camera app in order to capture a picture . If an error occurs while starting the * camera app , an { @ link ActivityNotFoundException } will be thrown . * @ param activity * The activity , the captured picture should be passed to by calling its * < code > onActivityResult < / code > method , as an instance of the class { @ link Activity } . * The activity may not be null * @ param requestCode * The request code , which should be used to pass the captured picture to the given * activity , as an { @ link Integer } value * @ param file * The file , the captured picture should be saved to , as an instance of the class { @ link * File } . The file may not be null . The file must exist and must not be a directory . */ public static void startCameraApp ( @ NonNull final Activity activity , final int requestCode , @ NonNull final File file ) { } }
Condition . INSTANCE . ensureNotNull ( file , "The file may not be null" ) ; Condition . INSTANCE . ensureFileIsNoDirectory ( file , "The file must exist and must not be a directory" ) ; startCameraApp ( activity , requestCode , Uri . fromFile ( file ) ) ;
public class CarouselControlRenderer { /** * This methods receives and processes input made by the user . More * specifically , it checks whether the user has interacted with the current * b : carouselControl . The default implementation simply stores the input * value in the list of submitted values . If the validation checks are * passed , the values in the < code > submittedValues < / code > list are store in * the backend bean . * @ param context * the FacesContext . * @ param component * the current b : carouselControl . */ @ Override public void decode ( FacesContext context , UIComponent component ) { } }
CarouselControl carouselControl = ( CarouselControl ) component ; if ( carouselControl . isDisabled ( ) ) { return ; } new AJAXRenderer ( ) . decode ( context , component ) ;
public class StoreRoutingPlan { /** * TODO : add unit test . */ public int getZoneNaryForNodesPartition ( int zoneId , int nodeId , int partitionId ) { } }
if ( cluster . getNodeById ( nodeId ) . getZoneId ( ) != zoneId ) { throw new VoldemortException ( "Node " + nodeId + " is not in zone " + zoneId + "! The node is in zone " + cluster . getNodeById ( nodeId ) . getZoneId ( ) ) ; } List < Integer > replicatingNodeIds = getReplicationNodeList ( partitionId ) ; int zoneNAry = - 1 ; for ( int replicatingNodeId : replicatingNodeIds ) { Node replicatingNode = cluster . getNodeById ( replicatingNodeId ) ; // bump up the replica number once you encounter a node in the given // zone if ( replicatingNode . getZoneId ( ) == zoneId ) { zoneNAry ++ ; } if ( replicatingNodeId == nodeId ) { return zoneNAry ; } } if ( zoneNAry > 0 ) { throw new VoldemortException ( "Node " + nodeId + " not a replica for partition " + partitionId + " in given zone " + zoneId ) ; } else { throw new VoldemortException ( "Could not find any replicas for partition Id " + partitionId + " in given zone " + zoneId ) ; }
public class Level { /** * Adds a Content Specification Topic to the Level . If the Topic already has a parent , then it is removed from that parent * and added to this level . * @ param specTopic The Content Specification Topic to be added to the level . */ public void appendSpecTopic ( final SpecTopic specTopic ) { } }
topics . add ( specTopic ) ; nodes . add ( specTopic ) ; if ( specTopic . getParent ( ) != null && specTopic . getParent ( ) instanceof Level ) { ( ( Level ) specTopic . getParent ( ) ) . removeSpecTopic ( specTopic ) ; } specTopic . setParent ( this ) ;
public class Document { /** * Returns all annotations contained within the document . * @ return List of annotations , which may be empty */ public List < Annotation > getAllAnnotations ( ) { } }
List < Annotation > ret = new ArrayList < Annotation > ( ) ; for ( StatementGroup sg : statementGroups ) { ret . addAll ( sg . getAllAnnotations ( ) ) ; } return ret ;
public class BaasUser { /** * Asynchronously requests to unfollow the user * @ param flags { @ link RequestOptions } * @ param handler an handler to be invoked when the request completes * @ return a { @ link com . baasbox . android . RequestToken } to manage the request */ public RequestToken unfollow ( int flags , BaasHandler < BaasUser > handler ) { } }
BaasBox box = BaasBox . getDefaultChecked ( ) ; Follow follow = new Follow ( box , false , this , flags , handler ) ; return box . submitAsync ( follow ) ;
public class AnalysisScreen { /** * This is field a potential index for analysis . * ( Override this to allow number fields to be keys ) . * @ param field The field to check . * @ return True if this is a potential key field . */ public boolean isKeyField ( BaseField field , int iSourceFieldSeq ) { } }
if ( field instanceof DateTimeField ) return true ; if ( field instanceof NumberField ) return false ; if ( iSourceFieldSeq == 0 ) return false ; // You must have at least one key field . return true ; // Typically any non - number is a key .
public class JsonNominatimClient { /** * { @ inheritDoc } * @ see fr . dudie . nominatim . client . NominatimClient # lookupAddress ( java . util . List ) */ @ Override public List < Address > lookupAddress ( final List < String > typeId ) throws IOException { } }
final NominatimLookupRequest q = new NominatimLookupRequest ( ) ; q . setQuery ( typeId ) ; return this . lookupAddress ( q ) ;
public class DefaultExamSystem { /** * Create a new system based on * this * . The forked System remembers the forked instances in * order to clear resources up ( if desired ) . */ @ Override public ExamSystem fork ( Option [ ] options ) { } }
try { ExamSystem sys = new DefaultExamSystem ( combine ( combinedOptions , options ) ) ; subsystems . add ( sys ) ; return sys ; } catch ( IOException exc ) { throw new TestContainerException ( exc ) ; }
public class ReaderInputStream { /** * Resets the StringReader . * @ exception IOException * if the StringReader fails to be reset */ @ Override public synchronized void reset ( ) throws IOException { } }
if ( in == null ) { throw new IOException ( "Stream Closed" ) ; } slack = null ; in . reset ( ) ;
public class UserGroupInformation {
    /**
     * Run the given action as the user.
     *
     * @param <T> the return type of the run method
     * @param action the method to execute
     * @return the value from the run method
     */
    public <T> T doAs(PrivilegedAction<T> action) {
        // NOTE(review): a null Subject is passed here, so the action does NOT
        // run with this user's Subject/credentials -- verify this is intended
        // and not a stripped-out "this.subject" reference.
        return Subject.doAs(null, action);
    }
}
public class PortablePathCursor {
    /**
     * Moves the cursor to the given token index by rewinding to the start of
     * the path and advancing token by token.
     *
     * @param indexToNavigateTo value to set the cursor's index to.
     * @throws IndexOutOfBoundsException if the path contains fewer tokens than
     *         the requested index (detected while advancing).
     */
    void index(int indexToNavigateTo) {
        // Rewind the cursor state to the first token of the path.
        this.index = 0;
        this.offset = 0;
        this.nextSplit = StringUtil.indexOf(path, '.', 0);
        this.token = null;
        // Walk forward one token at a time; advanceToNextToken() returns false
        // once the path is exhausted before reaching the requested index.
        for (int i = 1; i <= indexToNavigateTo; i++) {
            if (!advanceToNextToken()) {
                throw new IndexOutOfBoundsException("Index out of bound " + indexToNavigateTo + " in " + path);
            }
        }
    }
}
public class IterativeDataSet { /** * Closes the iteration . This method defines the end of the iterative program part . * @ param iterationResult The data set that will be fed back to the next iteration . * @ return The DataSet that represents the result of the iteration , after the computation has terminated . * @ see DataSet # iterate ( int ) */ public DataSet < T > closeWith ( DataSet < T > iterationResult ) { } }
return new BulkIterationResultSet < T > ( getExecutionEnvironment ( ) , getType ( ) , this , iterationResult ) ;
public class NotionalTokenizer { /** * 分词 * @ param text 文本 * @ return 分词结果 */ public static List < Term > segment ( char [ ] text ) { } }
List < Term > resultList = SEGMENT . seg ( text ) ; ListIterator < Term > listIterator = resultList . listIterator ( ) ; while ( listIterator . hasNext ( ) ) { if ( ! CoreStopWordDictionary . shouldInclude ( listIterator . next ( ) ) ) { listIterator . remove ( ) ; } } return resultList ;
public class Evaluator { /** * Add subscriptions for a given group . * @ param group * Name of the group . At Netflix this is typically the cluster that includes * the instance reporting data . * @ param subs * Set of subscriptions for the group . * @ return * This instance for chaining of update operations . */ public Evaluator addGroupSubscriptions ( String group , List < Subscription > subs ) { } }
List < Subscription > oldSubs = subscriptions . put ( group , subs ) ; if ( oldSubs == null ) { LOGGER . debug ( "added group {} with {} subscriptions" , group , subs . size ( ) ) ; } else { LOGGER . debug ( "updated group {}, {} subscriptions before, {} subscriptions now" , group , oldSubs . size ( ) , subs . size ( ) ) ; } return this ;
public class RtfParser {
    /**
     * Handles control word tokens. Depending on the current state a control
     * word can lead to a state change. When parsing the actual document
     * contents, certain tabled values are remapped. i.e. colors, fonts,
     * styles, etc.
     *
     * @param ctrlWordData The control word to handle.
     * @return errOK if ok, other if an error occurred.
     * @since 2.1.3
     */
    public int handleCtrlWord(RtfCtrlWordData ctrlWordData) {
        int result = errOK;
        this.ctrlWordCount++; // stats
        if (debugParser) {
            RtfParser.outputDebug(this.rtfDoc, groupLevel, "DEBUG: handleCtrlWord=" + ctrlWordData.ctrlWord + " param=[" + ctrlWordData.param + "]");
        }
        // While the tokeniser is skipping a group, control words are counted
        // for stats but not dispatched to the keyword manager.
        if (this.getTokeniserState() == TOKENISER_SKIP_GROUP) {
            this.ctrlWordSkippedCount++;
            if (debugParser) {
                RtfParser.outputDebug(this.rtfDoc, groupLevel, "DEBUG: SKIPPED");
            }
            return result;
        }
        // RtfDestination dest = (RtfDestination) this.getCurrentDestination();
        // boolean handled = false;
        // if (dest != null) { handled = dest.handleControlWord(ctrlWordData); }
        result = this.rtfKeywordMgr.handleKeyword(ctrlWordData, this.groupLevel);
        if (result == errOK) {
            this.ctrlWordHandledCount++;
        } else {
            this.ctrlWordNotHandledCount++;
            // NOTE(review): keyword-manager failures are deliberately swallowed
            // and reported as errOK (the original author's "hack for now"), so
            // callers never observe keyword-handling errors.
            result = errOK; // hack for now.
        }
        return result;
    }
}
public class EncodingUtils { /** * Get the JVM Converter for the specified encoding . * @ param encoding The encoding . * @ return The converter if it exists , otherwise return the encoding . */ public static String getJvmConverter ( String encoding ) { } }
init ( ) ; // String converter = ( String ) _ converterMap . get ( encoding . toLowerCase ( ) ) ; String converter = null ; com . ibm . wsspi . http . EncodingUtils encodingUtils = com . ibm . ws . webcontainer . osgi . WebContainer . getEncodingUtils ( ) ; if ( encodingUtils != null ) { converter = encodingUtils . getJvmConverter ( encoding ) ; } if ( converter != null ) { return converter ; } else { return encoding ; }
public class netbridge { /** * Use this API to fetch filtered set of netbridge resources . * filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */ public static netbridge [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
netbridge obj = new netbridge ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; netbridge [ ] response = ( netbridge [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class InternalXtextParser {
    /**
     * InternalXtext.g:350:1: ruleAbstractMetamodelDeclaration returns [EObject current=null] :
     *     (this_GeneratedMetamodel_0= ruleGeneratedMetamodel | this_ReferencedMetamodel_1= ruleReferencedMetamodel ) ;
     *
     * NOTE: ANTLR-generated parser rule -- do not hand-edit; regenerate from
     * the grammar instead.
     */
    public final EObject ruleAbstractMetamodelDeclaration() throws RecognitionException {
        EObject current = null;
        EObject this_GeneratedMetamodel_0 = null;
        EObject this_ReferencedMetamodel_1 = null;
        enterRule();
        try {
            // InternalXtext.g:356:2: ( (this_GeneratedMetamodel_0= ruleGeneratedMetamodel | this_ReferencedMetamodel_1= ruleReferencedMetamodel ) )
            // InternalXtext.g:357:2: (this_GeneratedMetamodel_0= ruleGeneratedMetamodel | this_ReferencedMetamodel_1= ruleReferencedMetamodel )
            {
                // Choose between the two alternatives based on one token of lookahead:
                // token 18 starts a GeneratedMetamodel, token 20 a ReferencedMetamodel.
                int alt10 = 2;
                int LA10_0 = input.LA(1);
                if ((LA10_0 == 18)) {
                    alt10 = 1;
                } else if ((LA10_0 == 20)) {
                    alt10 = 2;
                } else {
                    NoViableAltException nvae = new NoViableAltException("", 10, 0, input);
                    throw nvae;
                }
                switch (alt10) {
                    case 1:
                        // InternalXtext.g:358:3: this_GeneratedMetamodel_0= ruleGeneratedMetamodel
                        {
                            newCompositeNode(grammarAccess.getAbstractMetamodelDeclarationAccess().getGeneratedMetamodelParserRuleCall_0());
                            pushFollow(FollowSets000.FOLLOW_2);
                            this_GeneratedMetamodel_0 = ruleGeneratedMetamodel();
                            state._fsp--;
                            current = this_GeneratedMetamodel_0;
                            afterParserOrEnumRuleCall();
                        }
                        break;
                    case 2:
                        // InternalXtext.g:367:3: this_ReferencedMetamodel_1= ruleReferencedMetamodel
                        {
                            newCompositeNode(grammarAccess.getAbstractMetamodelDeclarationAccess().getReferencedMetamodelParserRuleCall_1());
                            pushFollow(FollowSets000.FOLLOW_2);
                            this_ReferencedMetamodel_1 = ruleReferencedMetamodel();
                            state._fsp--;
                            current = this_ReferencedMetamodel_1;
                            afterParserOrEnumRuleCall();
                        }
                        break;
                }
            }
            leaveRule();
        } catch (RecognitionException re) {
            recover(input, re);
            appendSkippedTokens();
        } finally {
        }
        return current;
    }
}
public class HttpMethodBase { /** * Generates all the required request { @ link Header header } s * to be submitted via the given { @ link HttpConnection connection } . * This implementation adds < tt > User - Agent < / tt > , < tt > Host < / tt > , * < tt > Cookie < / tt > , < tt > Authorization < / tt > , < tt > Proxy - Authorization < / tt > * and < tt > Proxy - Connection < / tt > headers , when appropriate . * Subclasses may want to override this method to to add additional * headers , and may choose to invoke this implementation ( via * < tt > super < / tt > ) to add the " standard " headers . * @ param state the { @ link HttpState state } information associated with this method * @ param conn the { @ link HttpConnection connection } used to execute * this HTTP method * @ throws IOException if an I / O ( transport ) error occurs . Some transport exceptions * can be recovered from . * @ throws HttpException if a protocol exception occurs . Usually protocol exceptions * cannot be recovered from . * @ see # writeRequestHeaders */ protected void addRequestHeaders ( HttpState state , HttpConnection conn ) throws IOException , HttpException { } }
LOG . trace ( "enter HttpMethodBase.addRequestHeaders(HttpState, " + "HttpConnection)" ) ; addHostRequestHeader ( state , conn ) ; addCookieRequestHeader ( state , conn ) ; addProxyConnectionHeader ( state , conn ) ;
public class ControlConnection { /** * Initializes the control connection . If it is already initialized , this is a no - op and all * parameters are ignored . * @ param listenToClusterEvents whether to register for TOPOLOGY _ CHANGE and STATUS _ CHANGE events . * If the control connection has already initialized with another value , this is ignored . * SCHEMA _ CHANGE events are always registered . * @ param reconnectOnFailure whether to schedule a reconnection if the initial attempt fails ( this * does not affect the returned future , which always represent the outcome of the initial * attempt only ) . * @ param useInitialReconnectionSchedule if no node can be reached , the type of reconnection * schedule to use . In other words , the value that will be passed to { @ link * ReconnectionPolicy # newControlConnectionSchedule ( boolean ) } . Note that this parameter is only * relevant if { @ code reconnectOnFailure } is true , otherwise it is not used . */ public CompletionStage < Void > init ( boolean listenToClusterEvents , boolean reconnectOnFailure , boolean useInitialReconnectionSchedule ) { } }
RunOrSchedule . on ( adminExecutor , ( ) -> singleThreaded . init ( listenToClusterEvents , reconnectOnFailure , useInitialReconnectionSchedule ) ) ; return singleThreaded . initFuture ;
public class AbstractControllerServer {
    /**
     * {@inheritDoc}
     *
     * Deactivates this controller server: cancels a still-running initial data
     * sync, shuts down the server watchdog, announces DEACTIVATING to remotes,
     * stops the informer, and finally goes OFFLINE. Returns silently if the
     * server was never initialized.
     *
     * @throws InterruptedException {@inheritDoc}
     * @throws CouldNotPerformException {@inheritDoc}
     */
    @Override
    public synchronized void deactivate() throws InterruptedException, CouldNotPerformException {
        manageLock.lockWrite(this);
        try {
            try {
                validateInitialization();
            } catch (InvalidStateException ex) {
                // was never initialized!
                return;
            }
            // skip initial data sync if still running
            if (initialDataSyncFuture != null && !initialDataSyncFuture.isDone()) {
                initialDataSyncFuture.cancel(true);
            }
            logger.debug("Deactivate AbstractControllerServer for: " + this);
            // The order is important: The informer publishes a zero event when the
            // availabilityState is set to deactivating which leads remotes to
            // disconnect. The remotes try to reconnect again and start a
            // requestData. If the server is still active it will respond and the
            // remotes will think that the server is still there, so the server
            // watchdog must go down first.
            if (serverWatchDog != null) {
                serverWatchDog.deactivate();
            }
            // inform remotes about deactivation
            setAvailabilityState(DEACTIVATING);
            if (informerWatchDog != null) {
                informerWatchDog.deactivate();
            }
            setAvailabilityState(OFFLINE);
        } finally {
            manageLock.unlockWrite(this);
        }
    }
}
public class BaseRedisQueue {
    /**
     * Sets the name of the Redis sorted-set used to store ephemeral message ids.
     *
     * @param redisSortedSetName name of the sorted set; must not be null
     * @return this queue instance, for fluent chaining
     */
    public BaseRedisQueue<ID, DATA> setRedisSortedSetName(String redisSortedSetName) {
        // Keep both the String form and its UTF-8 byte form in sync; the byte[]
        // field presumably feeds the raw Redis commands -- verify against usage.
        // NOTE(review): throws NullPointerException if redisSortedSetName is
        // null -- confirm callers never pass null.
        _redisSortedSetName = redisSortedSetName;
        this.redisSortedSetName = _redisSortedSetName.getBytes(QueueUtils.UTF8);
        return this;
    }
}
public class LocalQueuePoint {
    /**
     * Returns an iterator over the items stored on this queue point in the
     * message store. Retrieval starts from position {@code fromIndex} in the
     * cursor over the item stream and ends at {@code toIndex}, returning at
     * most {@code totalNumberOfMsgsPerPage} items (one console page), or fewer
     * if the total number of queued messages is reached first.
     * <p>
     * The method iteratively calls {@code cursor.next(fromIndex - 1)} -- each
     * call returns a single item positioned at or after that index in the
     * item stream -- and stops when the counter reaches {@code toIndex}, the
     * total queued message count is exceeded, or the cursor is exhausted.
     *
     * @param fromIndex 1-based index of the first message to include
     * @param toIndex index of the last message to include
     * @param totalNumberOfMsgsPerPage page-size cap on the number of returned items
     * @return an iterator over the control adapters of the selected messages
     * @throws SIMPRuntimeOperationFailedException if the message store cursor fails
     */
    public SIMPIterator getQueuedMessageIterator(int fromIndex, int toIndex, int totalNumberOfMsgsPerPage) throws SIMPRuntimeOperationFailedException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "getQueuedMessageIterator(int fromIndex= " + fromIndex + ", int toIndex= " + toIndex + ", int totalNumberOfMsgsPerPage= " + totalNumberOfMsgsPerPage);
        List<ControlAdapter> messages = new ArrayList<ControlAdapter>();
        NonLockingCursor cursor = null;
        int iCount = fromIndex - 1;
        try {
            cursor = itemStream.newNonLockingItemCursor(null);
            cursor.allowUnavailableItems();
            long totalQueuedMessages = getNumberOfQueuedMessages();
            MessageItem item = (MessageItem) cursor.next(fromIndex - 1); // 673411
            while (item != null) {
                // Stop at the end of the requested window or past the queue depth.
                if (iCount >= toIndex || (iCount > totalQueuedMessages)) {
                    break;
                }
                // force the arrival time to be written to the jsMessage
                item.forceCurrentMEArrivalTimeToJsMessage();
                // It's possible to get a null adapter back
                ControlAdapter cAdapter = item.getControlAdapter();
                if (cAdapter != null) {
                    // Collect only items inside [fromIndex-1, toIndex], capped at
                    // one console page of messages.
                    if (((iCount >= (fromIndex - 1)) && iCount <= toIndex) && messages.size() <= totalNumberOfMsgsPerPage) {
                        messages.add(cAdapter);
                    }
                }
                AbstractItem abItem = cursor.next(fromIndex - 1); // 673411
                if (abItem != null) {
                    item = (MessageItem) abItem;
                } else {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                        SibTr.debug(tc, " Item retrieved from messagestore is null at index" + iCount);
                    break;
                }
                iCount++;
            }
        } catch (MessageStoreException e) {
            FFDCFilter.processException(e, "com.ibm.ws.sib.processor.runtime.LocalQueuePoint.getQueuedMessageIterator", "1:263:1.57", this);
            SibTr.exception(tc, e);
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "getQueuedMessageIterator", e);
            throw new SIMPRuntimeOperationFailedException(e);
        } finally {
            // Always release the message store cursor.
            if (cursor != null)
                cursor.finished();
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "getQueuedMessageIterator fromIndex=" + fromIndex + " toIndex= " + toIndex + " totalMessagesPerpage= " + totalNumberOfMsgsPerPage);
        return new BasicSIMPIterator(messages.iterator());
    }
}
public class FileBasedSource { /** * Initialize the logger . * @ param state Source state */ protected void initLogger ( SourceState state ) { } }
StringBuilder sb = new StringBuilder ( ) ; sb . append ( "[" ) ; sb . append ( Strings . nullToEmpty ( state . getProp ( ConfigurationKeys . SOURCE_ENTITY ) ) ) ; sb . append ( "]" ) ; MDC . put ( "sourceInfo" , sb . toString ( ) ) ;
public class EnvironmentLogger {
    /**
     * True if the level is loggable, considering the finest effective level and
     * any local (per-environment) override.
     *
     * @param level the level to test; null is never loggable
     * @return true if a record at this level would be logged
     */
    @Override
    public final boolean isLoggable(Level level) {
        if (level == null)
            return false;
        int intValue = level.intValue();
        // Below the finest level effective anywhere: cheap early rejection.
        if (intValue < _finestEffectiveLevelValue)
            return false;
        else if (!_hasLocalEffectiveLevel) {
            // No local override exists, so the finest-level check above suffices.
            return true;
        } else {
            // A local (presumably per-environment/thread) level may override the
            // system level -- fall back to the system level when it is unset.
            Integer localValue = _localEffectiveLevel.get();
            if (localValue != null) {
                int localIntValue = localValue.intValue();
                if (localIntValue == Level.OFF.intValue())
                    return false;
                else
                    return localIntValue <= intValue;
            } else {
                if (_systemEffectiveLevelValue == Level.OFF.intValue())
                    return false;
                else
                    return _systemEffectiveLevelValue <= intValue;
            }
        }
    }
}
public class CommerceAddressRestrictionUtil {
    /**
     * Returns the commerce address restriction with the primary key or throws a
     * {@link NoSuchAddressRestrictionException} if it could not be found.
     *
     * @param commerceAddressRestrictionId the primary key of the commerce address restriction
     * @return the commerce address restriction
     * @throws NoSuchAddressRestrictionException if a commerce address restriction with the primary key could not be found
     */
    public static CommerceAddressRestriction findByPrimaryKey(long commerceAddressRestrictionId)
        throws com.liferay.commerce.exception.NoSuchAddressRestrictionException {
        // Static facade: delegates to the service-locator-provided persistence bean.
        return getPersistence().findByPrimaryKey(commerceAddressRestrictionId);
    }
}
public class ComboButton { /** * a simple test app */ public static void main ( final String [ ] args ) { } }
LookAndFeelManager . get ( ) . init ( ) ; final ComboButton comboButton1 = new ComboButton ( ) ; comboButton1 . addButton ( "Foo!" , IconUtils . ACTION_ADD_DARK , true ) ; comboButton1 . addButton ( "Boo!" , IconUtils . ACTION_REMOVE_DARK , true ) ; final ComboButton comboButton2 = new ComboButton ( ) ; comboButton2 . addButton ( "Foo!" , IconUtils . ACTION_ADD_DARK , false ) ; comboButton2 . addButton ( "Boo!" , IconUtils . ACTION_REMOVE_DARK , false ) ; comboButton2 . addButton ( "Mrr!" , IconUtils . ACTION_REFRESH , true ) ; comboButton2 . addButton ( "Rrrh!" , IconUtils . ACTION_DRILL_TO_DETAIL , true ) ; final DCPanel panel = new DCPanel ( WidgetUtils . COLOR_DEFAULT_BACKGROUND ) ; panel . add ( comboButton1 ) ; panel . add ( comboButton2 ) ; final JButton regularButton = WidgetFactory . createDefaultButton ( "Regular button" , IconUtils . ACTION_ADD_DARK ) ; panel . add ( regularButton ) ; final JFrame frame = new JFrame ( "test" ) ; frame . setDefaultCloseOperation ( JFrame . EXIT_ON_CLOSE ) ; frame . setSize ( 500 , 400 ) ; frame . add ( panel ) ; frame . pack ( ) ; frame . setVisible ( true ) ;
public class DefaultTypeCache { /** * ( non - Javadoc ) * @ see * org . apache . atlas . typesystem . types . cache . TypeCache # putAll ( java * . util . Collection ) */ @ Override public void putAll ( Collection < IDataType > types ) throws AtlasException { } }
for ( IDataType type : types ) { assertValidType ( type ) ; types_ . put ( type . getName ( ) , type ) ; }
public class StreamExecutionEnvironment {
    /**
     * Adds a new Kryo default serializer to the Runtime.
     *
     * @param type
     *     The class of the types serialized with the given serializer.
     * @param serializerClass
     *     The class of the serializer to use.
     */
    public void addDefaultKryoSerializer(Class<?> type, Class<? extends Serializer<?>> serializerClass) {
        // The registration is stored on this environment's config object.
        config.addDefaultKryoSerializer(type, serializerClass);
    }
}
public class HtmlDocletWriter {
    /**
     * Adds the summary content.
     *
     * @param element the Element for which the summary will be generated
     * @param htmltree the documentation tree to which the summary will be added
     */
    public void addSummaryComment(Element element, Content htmltree) {
        // Delegates using the element's first-sentence comment trees as the summary text.
        addSummaryComment(element, utils.getFirstSentenceTrees(element), htmltree);
    }
}
public class XPathParser {
    /**
     * The value of the Literal is the sequence of characters inside
     * the " or ' characters.
     *
     * Literal ::= '"' [^"]* '"' | "'" [^']* "'"
     *
     * @throws javax.xml.transform.TransformerException if the current token is
     *         not a properly quoted literal
     */
    protected void Literal() throws javax.xml.transform.TransformerException {
        int last = m_token.length() - 1;
        char c0 = m_tokenChar;
        char cX = m_token.charAt(last);
        // Accept only a token whose first and last characters are matching quotes.
        if (((c0 == '\"') && (cX == '\"')) || ((c0 == '\'') && (cX == '\''))) {
            // Mutate the token to remove the quotes and have the XString object
            // already made: replace the raw token in the queue with its XString.
            int tokenQueuePos = m_queueMark - 1;
            m_ops.m_tokenQueue.setElementAt(null, tokenQueuePos);
            Object obj = new XString(m_token.substring(1, last));
            m_ops.m_tokenQueue.setElementAt(obj, tokenQueuePos);
            // lit = m_token.substring(1, last);
            // Record the queue position of the literal in the op map and bump
            // the op map length.
            m_ops.setOp(m_ops.getOp(OpMap.MAPINDEX_LENGTH), tokenQueuePos);
            m_ops.setOp(OpMap.MAPINDEX_LENGTH, m_ops.getOp(OpMap.MAPINDEX_LENGTH) + 1);
            nextToken();
        } else {
            error(XPATHErrorResources.ER_PATTERN_LITERAL_NEEDS_BE_QUOTED,
                  new Object[]{ m_token }); // "Pattern literal (" + m_token + ") needs to be quoted!"
        }
    }
}
public class LeafNode {
    /**
     * Publishes multiple events to the node. Same rules apply as in {@link #publish(Item)}.
     * In addition, if {@link ConfigureForm#isPersistItems()} == false, only the last item in the input
     * list will get stored on the node, assuming it stores the last sent item.
     *
     * @param items - The collection of items being sent
     * @param <T> type of the items.
     * @throws NotConnectedException
     * @throws InterruptedException
     * @throws XMPPErrorException
     * @throws NoResponseException
     * @deprecated use {@link #publish(Collection)} instead.
     */
    @Deprecated
    public <T extends Item> void send(Collection<T> items) throws NotConnectedException, InterruptedException, NoResponseException, XMPPErrorException {
        // Retained for backwards compatibility; simply forwards to publish(Collection).
        publish(items);
    }
}
public class SpecializedOps_ZDRM { /** * Returns the magnitude squared of the complex element along the diagonal with the largest magnitude < br > * < br > * Max { | a < sub > ij < / sub > | ^ 2 } for all i and j < br > * @ param a A matrix . Not modified . * @ return The max magnitude squared */ public static double elementDiagMaxMagnitude2 ( ZMatrixRMaj a ) { } }
final int size = Math . min ( a . numRows , a . numCols ) ; int rowStride = a . getRowStride ( ) ; double max = 0 ; for ( int i = 0 ; i < size ; i ++ ) { int index = i * rowStride + i * 2 ; double real = a . data [ index ] ; double imaginary = a . data [ index + 1 ] ; double m = real * real + imaginary * imaginary ; if ( m > max ) { max = m ; } } return max ;
public class Model { /** * Generates a XML document from content of this model . * @ param pretty pretty format ( human readable ) , or one line text . * @ param declaration true to include XML declaration at the top * @ param attributeNames list of attributes to include . No arguments = = include all attributes . * @ return generated XML . */ public String toXml ( boolean pretty , boolean declaration , String ... attributeNames ) { } }
StringBuilder sb = new StringBuilder ( ) ; if ( declaration ) { sb . append ( "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" ) ; if ( pretty ) { sb . append ( '\n' ) ; } } toXmlP ( sb , pretty , "" , attributeNames ) ; return sb . toString ( ) ;
public class CommandFaceDescriptor { /** * { @ inheritDoc } */ public void setLabelInfo ( CommandButtonLabelInfo labelInfo ) { } }
if ( labelInfo == null ) { labelInfo = CommandButtonLabelInfo . BLANK_BUTTON_LABEL ; } CommandButtonLabelInfo old = this . labelInfo ; this . labelInfo = labelInfo ; firePropertyChange ( LABEL_INFO_PROPERTY , old , this . labelInfo ) ;
public class Guid {
    /**
     * Returns the 6 byte pseudo IEEE 802 Media Access Control (MAC) address.
     * Since the IEEE 802 MAC address of the local Network Interface Card (NIC)
     * cannot be retrieved in Java, a pseudo IEEE 802 MAC address is generated
     * by applying a hash function to a concatenation of the following data:
     * <ul>
     * <li>Cross-thread and cross-process unique time stamp (milliseconds)</li>
     * <li>The local host address (falling back to 127.0.0.1 when unknown)</li>
     * <li>Hexadecimal representation of the hash code of a new <code>java.lang.Object</code></li>
     * <li>Free memory in the JVM (bytes)</li>
     * <li>Total memory in the JVM (bytes)</li>
     * </ul>
     * The MD5 hash algorithm is used, invoked through
     * <code>java.security.MessageDigest</code>. If no MD5 provider is available
     * in the run-time environment, a simple additive hash algorithm is used
     * over the same data (it is computationally feasible for that fallback to
     * produce the same output for two different inputs).
     *
     * @param uniqueTimeStamp Cross-thread and cross-process unique time stamp (milliseconds)
     * @return 6 byte pseudo IEEE 802 MAC address.
     */
    private synchronized byte[] getPseudoIEEE802MACAddress(long uniqueTimeStamp) {
        byte[] ieee802Addr = new byte[6];
        byte[] currentTime = String.valueOf(uniqueTimeStamp).getBytes();
        byte[] localHostAddress = null;
        try {
            localHostAddress = InetAddress.getLocalHost().getAddress();
        } catch (UnknownHostException u) {
            // No resolvable host address: fall back to the loopback address.
            localHostAddress = new byte[]{ 127, 0, 0, 1 };
        }
        // Use the unsigned hexadecimal representation of the hash code of the object instead
        // of the java.lang.Object#toString() to eliminate the 'java.lang.Object@' prefix
        // (see java.lang.Object#toString()) which never changes:
        byte[] inMemoryObject = Integer.toHexString(new Object().hashCode()).getBytes();
        byte[] freeMemory = String.valueOf(Runtime.getRuntime().freeMemory()).getBytes();
        byte[] totalMemory = String.valueOf(Runtime.getRuntime().totalMemory()).getBytes();
        // Concatenate all entropy sources into a single digest input buffer.
        byte[] messageDigestInput = new byte[currentTime.length + localHostAddress.length
                + inMemoryObject.length + freeMemory.length + totalMemory.length];
        int messageDigestInputIndex = 0;
        System.arraycopy(currentTime, 0, messageDigestInput, messageDigestInputIndex, currentTime.length);
        messageDigestInputIndex += currentTime.length;
        System.arraycopy(localHostAddress, 0, messageDigestInput, messageDigestInputIndex, localHostAddress.length);
        messageDigestInputIndex += localHostAddress.length;
        System.arraycopy(inMemoryObject, 0, messageDigestInput, messageDigestInputIndex, inMemoryObject.length);
        messageDigestInputIndex += inMemoryObject.length;
        System.arraycopy(freeMemory, 0, messageDigestInput, messageDigestInputIndex, freeMemory.length);
        messageDigestInputIndex += freeMemory.length;
        System.arraycopy(totalMemory, 0, messageDigestInput, messageDigestInputIndex, totalMemory.length);
        try {
            // Compute the message digest and take its first 6 bytes as the address:
            MessageDigest md5 = MessageDigest.getInstance("MD5");
            md5.reset();
            System.arraycopy(md5.digest(messageDigestInput), 0, ieee802Addr, 0, 6);
        } catch (Throwable t) {
            // If a provider for the MD5 hash algorithm (see Java Cryptography Architecture (JCA) APIs)
            // is not available in the run-time environment, a simple additive hash algorithm is used over
            // the random data. The simple additive hash algorithm is similar to that used in
            // java.lang.String#hashCode(). That is:
            //     b[0]*31^(n-1) + b[1]*31^(n-2) + ... + b[n-1]
            // where:
            //     b[i] = i-th byte
            //     n    = total number of bytes
            //     ^    = exponentiation
            // Note: It is computationally feasible for this simple additive hash algorithm to generate
            // the same output for two different inputs.
            int hashCode = 0;
            for (int counter = 0; counter < messageDigestInput.length; counter++) {
                hashCode = (31 * hashCode) + messageDigestInput[counter];
            }
            byte[] hashCodeBytes = Integer.toHexString(hashCode).getBytes();
            // Zero pad the hash code if less than 6 bytes:
            if (hashCodeBytes.length < 6) {
                System.arraycopy(hashCodeBytes, 0, ieee802Addr, (6 - hashCodeBytes.length), hashCodeBytes.length);
            }
            // Use the last 6 bytes of the hash code which contains the most varying data:
            else {
                System.arraycopy(hashCodeBytes, (hashCodeBytes.length - 6), ieee802Addr, 0, 6);
            }
        }
        // Turn on the high order bit of the first byte:
        ieee802Addr[0] |= 0x80;
        return ieee802Addr;
    }
}
public class SoapServerFaultResponseActionBuilder { /** * Sets the charset name for this send action builder ' s attachment . * @ param charsetName * @ return */ public SoapServerFaultResponseActionBuilder charset ( String charsetName ) { } }
if ( ! getAction ( ) . getAttachments ( ) . isEmpty ( ) ) { getAction ( ) . getAttachments ( ) . get ( getAction ( ) . getAttachments ( ) . size ( ) - 1 ) . setCharsetName ( charsetName ) ; } return this ;