signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class DevUtilsDemoObjects { /** * region > create ( action ) */
@ MemberOrder ( sequence = "2" ) public DevUtilsDemoObject create ( @ ParameterLayout ( named = "Name" ) final String name ) { } } | final DevUtilsDemoObject obj = container . newTransientInstance ( DevUtilsDemoObject . class ) ; obj . setName ( name ) ; container . persistIfNotAlready ( obj ) ; return obj ; |
public class XMLGregorianCalendar { /** * < p > Set time as one unit , including optional milliseconds . < / p >
* @ param hour value constraints are summarized in
* < a href = " # datetimefield - hour " > hour field of date / time field mapping table < / a > .
* @ param minute value constraints are summarized in
* < a href = " # datetimefield - minute " > minute field of date / time field mapping table < / a > .
* @ param second value constraints are summarized in
* < a href = " # datetimefield - second " > second field of date / time field mapping table < / a > .
* @ param millisecond value of { @ link DatatypeConstants # FIELD _ UNDEFINED } indicates this
* optional field is not set .
* @ throws IllegalArgumentException if any parameter is
* outside value constraints for the field as specified in
* < a href = " # datetimefieldmapping " > date / time field mapping table < / a > . */
public void setTime ( int hour , int minute , int second , int millisecond ) { } } | setHour ( hour ) ; setMinute ( minute ) ; setSecond ( second ) ; setMillisecond ( millisecond ) ; |
public class SpringDataSourceFactoryBean { /** * { @ inheritDoc } */
@ Override protected DataSource createInstance ( ) { } } | if ( targetName == null ) { throw new IllegalStateException ( "targetName must not be null" ) ; } final DataSource dataSource = getBeanFactory ( ) . getBean ( targetName , DataSource . class ) ; JdbcWrapper . registerSpringDataSource ( targetName , dataSource ) ; final DataSource result = JdbcWrapper . SINGLETON . createDataSourceProxy ( targetName , dataSource ) ; LOG . debug ( "Spring target datasource wrapped: " + targetName ) ; return result ; |
public class RetryStageExecutionRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( RetryStageExecutionRequest retryStageExecutionRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( retryStageExecutionRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( retryStageExecutionRequest . getPipelineName ( ) , PIPELINENAME_BINDING ) ; protocolMarshaller . marshall ( retryStageExecutionRequest . getStageName ( ) , STAGENAME_BINDING ) ; protocolMarshaller . marshall ( retryStageExecutionRequest . getPipelineExecutionId ( ) , PIPELINEEXECUTIONID_BINDING ) ; protocolMarshaller . marshall ( retryStageExecutionRequest . getRetryMode ( ) , RETRYMODE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class GDLLoader { /** * Returns a cache containing a mapping from variables to edges .
* @ param includeUserDefined include user - defined variables
* @ param includeAutoGenerated include auto - generated variables
* @ return immutable edge cache */
Map < String , Edge > getEdgeCache ( boolean includeUserDefined , boolean includeAutoGenerated ) { } } | return getCache ( userEdgeCache , autoEdgeCache , includeUserDefined , includeAutoGenerated ) ; |
public class CommerceAccountUserRelServiceBaseImpl { /** * Sets the user group role remote service .
* @ param userGroupRoleService the user group role remote service */
public void setUserGroupRoleService ( com . liferay . portal . kernel . service . UserGroupRoleService userGroupRoleService ) { } } | this . userGroupRoleService = userGroupRoleService ; |
public class ApiOvhTelephony { /** * Get all available specific number from a country
* REST : GET / telephony / number / specificNumbers
* @ param type [ required ] The type of number
* @ param country [ required ] The country
* @ param zone [ required ] The zone ( geographic number )
* @ param range [ required ] The range ( special number ) */
public ArrayList < OvhSpecificNumber > number_specificNumbers_GET ( OvhNumberCountryEnum country , String range , OvhNumberTypeEnum type , String zone ) throws IOException { } } | String qPath = "/telephony/number/specificNumbers" ; StringBuilder sb = path ( qPath ) ; query ( sb , "country" , country ) ; query ( sb , "range" , range ) ; query ( sb , "type" , type ) ; query ( sb , "zone" , zone ) ; String resp = execN ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , t22 ) ; |
public class Stream { /** * Zip together the iterators until all of them runs out of values .
* Each array of values is combined into a single value using the supplied zipFunction function .
* @ param c
* @ param valuesForNone value to fill for any iterator runs out of values .
* @ param zipFunction
* @ return */
@ SuppressWarnings ( "resource" ) public static < R > Stream < R > zip ( final Collection < ? extends CharStream > c , final char [ ] valuesForNone , final CharNFunction < R > zipFunction ) { } } | if ( N . isNullOrEmpty ( c ) ) { return Stream . empty ( ) ; } final int len = c . size ( ) ; if ( len != valuesForNone . length ) { throw new IllegalArgumentException ( "The size of 'valuesForNone' must be same as the size of the collection of iterators" ) ; } final CharStream [ ] ss = c . toArray ( new CharStream [ len ] ) ; final CharIterator [ ] iters = new CharIterator [ len ] ; for ( int i = 0 ; i < len ; i ++ ) { iters [ i ] = ss [ i ] . iteratorEx ( ) ; } return new IteratorStream < > ( new ObjIteratorEx < R > ( ) { @ Override public boolean hasNext ( ) { for ( int i = 0 ; i < len ; i ++ ) { if ( iters [ i ] != null ) { if ( iters [ i ] . hasNext ( ) ) { return true ; } else if ( iters [ i ] != null ) { iters [ i ] = null ; ss [ i ] . close ( ) ; } } } return false ; } @ Override public R next ( ) { final char [ ] args = new char [ len ] ; boolean hasNext = false ; for ( int i = 0 ; i < len ; i ++ ) { if ( iters [ i ] != null && iters [ i ] . hasNext ( ) ) { hasNext = true ; args [ i ] = iters [ i ] . nextChar ( ) ; } else { args [ i ] = valuesForNone [ i ] ; } } if ( hasNext == false ) { throw new NoSuchElementException ( ) ; } return zipFunction . apply ( args ) ; } } ) . onClose ( newCloseHandler ( c ) ) ; |
public class Shape { /** * Iterate over 2
* coordinate spaces given 2 arrays
* @ param arr the first array
* @ param arr2 the second array
* @ param coordinateFunction the coordinate function to use */
public static void iterate ( INDArray arr , INDArray arr2 , CoordinateFunction coordinateFunction ) { } } | Shape . iterate ( 0 , arr . rank ( ) , arr . shape ( ) , new long [ arr . rank ( ) ] , 0 , arr2 . rank ( ) , arr2 . shape ( ) , new long [ arr2 . rank ( ) ] , coordinateFunction ) ; |
public class SearchViewFacade { /** * Sets a listener for user actions within the SearchView .
* @ param listener the listener object that receives callbacks when the user performs
* actions in the SearchView such as clicking on buttons or typing a query . */
public void setOnQueryTextListener ( @ NonNull final SearchView . OnQueryTextListener listener ) { } } | if ( searchView != null ) { searchView . setOnQueryTextListener ( listener ) ; } else if ( supportView != null ) { supportView . setOnQueryTextListener ( new android . support . v7 . widget . SearchView . OnQueryTextListener ( ) { @ Override public boolean onQueryTextSubmit ( String query ) { return listener . onQueryTextSubmit ( query ) ; } @ Override public boolean onQueryTextChange ( String newText ) { return listener . onQueryTextChange ( newText ) ; } } ) ; } else { throw new IllegalStateException ( ERROR_NO_SEARCHVIEW ) ; } |
public class BayesNetReader { /** * Reads a Bayesian Network from a network InputStream and a CPD InputStream , and returns
* a factor graph representation of it . */
public FactorGraph readBnAsFg ( InputStream networkIs , InputStream cpdIs ) throws IOException { } } | // Read network file .
BufferedReader networkReader = new BufferedReader ( new InputStreamReader ( networkIs ) ) ; // - - read the number of variables .
int numVars = Integer . parseInt ( networkReader . readLine ( ) . trim ( ) ) ; varMap = new HashMap < String , Var > ( ) ; VarSet allVars = new VarSet ( ) ; for ( int i = 0 ; i < numVars ; i ++ ) { Var var = parseVar ( networkReader . readLine ( ) ) ; allVars . add ( var ) ; varMap . put ( var . getName ( ) , var ) ; } assert ( allVars . size ( ) == numVars ) ; // - - read the dependencies between variables .
// . . . . or not . . .
networkReader . close ( ) ; // Read CPD file .
BufferedReader cpdReader = new BufferedReader ( new InputStreamReader ( cpdIs ) ) ; factorMap = new LinkedHashMap < VarSet , ExplicitFactor > ( ) ; String line ; while ( ( line = cpdReader . readLine ( ) ) != null ) { // Parse out the variable configuration .
VarConfig config = new VarConfig ( ) ; String [ ] assns = whitespaceOrComma . split ( line ) ; for ( int i = 0 ; i < assns . length - 1 ; i ++ ) { String assn = assns [ i ] ; String [ ] va = equals . split ( assn ) ; assert ( va . length == 2 ) ; String varName = va [ 0 ] ; String stateName = va [ 1 ] ; config . put ( varMap . get ( varName ) , stateName ) ; } // The double is the last value on the line .
double value = Double . parseDouble ( assns [ assns . length - 1 ] ) ; // Factor graphs store the log value .
value = FastMath . log ( value ) ; // Get the factor for this configuration , creating a new one if necessary .
VarSet vars = config . getVars ( ) ; ExplicitFactor f = factorMap . get ( vars ) ; if ( f == null ) { f = new ExplicitFactor ( vars ) ; } // Set the value in the factor .
f . setValue ( config . getConfigIndex ( ) , value ) ; factorMap . put ( vars , f ) ; } cpdReader . close ( ) ; // Create the factor graph .
FactorGraph fg = new FactorGraph ( ) ; for ( ExplicitFactor f : factorMap . values ( ) ) { fg . addFactor ( f ) ; } return fg ; |
public class ELTools { /** * Yields the type of the variable displayed by a component .
* @ param p _ component
* the UIComponent
* @ return the type ( as class ) */
public static Class < ? > getType ( UIComponent p_component ) { } } | ValueExpression valueExpression = p_component . getValueExpression ( "value" ) ; if ( valueExpression != null ) { FacesContext context = FacesContext . getCurrentInstance ( ) ; ELContext elContext = context . getELContext ( ) ; return valueExpression . getType ( elContext ) ; } return null ; |
public class Utils { /** * Creates the launch context , which describes how to bring up a TaskExecutor / TaskManager process in
* an allocated YARN container .
* < p > This code is extremely YARN specific and registers all the resources that the TaskExecutor
* needs ( such as JAR file , config file , . . . ) and all environment variables in a YARN
* container launch context . The launch context then ensures that those resources will be
* copied into the containers transient working directory .
* @ param flinkConfig
* The Flink configuration object .
* @ param yarnConfig
* The YARN configuration object .
* @ param env
* The environment variables .
* @ param tmParams
* The TaskExecutor container memory parameters .
* @ param taskManagerConfig
* The configuration for the TaskExecutors .
* @ param workingDirectory
* The current application master container ' s working directory .
* @ param taskManagerMainClass
* The class with the main method .
* @ param log
* The logger .
* @ return The launch context for the TaskManager processes .
* @ throws Exception Thrown if the launch context could not be created , for example if
* the resources could not be copied . */
static ContainerLaunchContext createTaskExecutorContext ( org . apache . flink . configuration . Configuration flinkConfig , YarnConfiguration yarnConfig , Map < String , String > env , ContaineredTaskManagerParameters tmParams , org . apache . flink . configuration . Configuration taskManagerConfig , String workingDirectory , Class < ? > taskManagerMainClass , Logger log ) throws Exception { } } | // get and validate all relevant variables
String remoteFlinkJarPath = env . get ( YarnConfigKeys . FLINK_JAR_PATH ) ; require ( remoteFlinkJarPath != null , "Environment variable %s not set" , YarnConfigKeys . FLINK_JAR_PATH ) ; String appId = env . get ( YarnConfigKeys . ENV_APP_ID ) ; require ( appId != null , "Environment variable %s not set" , YarnConfigKeys . ENV_APP_ID ) ; String clientHomeDir = env . get ( YarnConfigKeys . ENV_CLIENT_HOME_DIR ) ; require ( clientHomeDir != null , "Environment variable %s not set" , YarnConfigKeys . ENV_CLIENT_HOME_DIR ) ; String shipListString = env . get ( YarnConfigKeys . ENV_CLIENT_SHIP_FILES ) ; require ( shipListString != null , "Environment variable %s not set" , YarnConfigKeys . ENV_CLIENT_SHIP_FILES ) ; String yarnClientUsername = env . get ( YarnConfigKeys . ENV_HADOOP_USER_NAME ) ; require ( yarnClientUsername != null , "Environment variable %s not set" , YarnConfigKeys . ENV_HADOOP_USER_NAME ) ; final String remoteKeytabPath = env . get ( YarnConfigKeys . KEYTAB_PATH ) ; final String remoteKeytabPrincipal = env . get ( YarnConfigKeys . KEYTAB_PRINCIPAL ) ; final String remoteYarnConfPath = env . get ( YarnConfigKeys . ENV_YARN_SITE_XML_PATH ) ; final String remoteKrb5Path = env . get ( YarnConfigKeys . ENV_KRB5_PATH ) ; if ( log . isDebugEnabled ( ) ) { log . debug ( "TM:remote keytab path obtained {}" , remoteKeytabPath ) ; log . debug ( "TM:remote keytab principal obtained {}" , remoteKeytabPrincipal ) ; log . debug ( "TM:remote yarn conf path obtained {}" , remoteYarnConfPath ) ; log . debug ( "TM:remote krb5 path obtained {}" , remoteKrb5Path ) ; } String classPathString = env . get ( ENV_FLINK_CLASSPATH ) ; require ( classPathString != null , "Environment variable %s not set" , YarnConfigKeys . ENV_FLINK_CLASSPATH ) ; // register keytab
LocalResource keytabResource = null ; if ( remoteKeytabPath != null ) { log . info ( "Adding keytab {} to the AM container local resource bucket" , remoteKeytabPath ) ; Path keytabPath = new Path ( remoteKeytabPath ) ; FileSystem fs = keytabPath . getFileSystem ( yarnConfig ) ; keytabResource = registerLocalResource ( fs , keytabPath ) ; } // To support Yarn Secure Integration Test Scenario
LocalResource yarnConfResource = null ; LocalResource krb5ConfResource = null ; boolean hasKrb5 = false ; if ( remoteYarnConfPath != null && remoteKrb5Path != null ) { log . info ( "TM:Adding remoteYarnConfPath {} to the container local resource bucket" , remoteYarnConfPath ) ; Path yarnConfPath = new Path ( remoteYarnConfPath ) ; FileSystem fs = yarnConfPath . getFileSystem ( yarnConfig ) ; yarnConfResource = registerLocalResource ( fs , yarnConfPath ) ; log . info ( "TM:Adding remoteKrb5Path {} to the container local resource bucket" , remoteKrb5Path ) ; Path krb5ConfPath = new Path ( remoteKrb5Path ) ; fs = krb5ConfPath . getFileSystem ( yarnConfig ) ; krb5ConfResource = registerLocalResource ( fs , krb5ConfPath ) ; hasKrb5 = true ; } // register Flink Jar with remote HDFS
final LocalResource flinkJar ; { Path remoteJarPath = new Path ( remoteFlinkJarPath ) ; FileSystem fs = remoteJarPath . getFileSystem ( yarnConfig ) ; flinkJar = registerLocalResource ( fs , remoteJarPath ) ; } // register conf with local fs
final LocalResource flinkConf ; { // write the TaskManager configuration to a local file
final File taskManagerConfigFile = new File ( workingDirectory , UUID . randomUUID ( ) + "-taskmanager-conf.yaml" ) ; log . debug ( "Writing TaskManager configuration to {}" , taskManagerConfigFile . getAbsolutePath ( ) ) ; BootstrapTools . writeConfiguration ( taskManagerConfig , taskManagerConfigFile ) ; try { Path homeDirPath = new Path ( clientHomeDir ) ; FileSystem fs = homeDirPath . getFileSystem ( yarnConfig ) ; flinkConf = setupLocalResource ( fs , appId , new Path ( taskManagerConfigFile . toURI ( ) ) , homeDirPath , "" ) . f1 ; log . debug ( "Prepared local resource for modified yaml: {}" , flinkConf ) ; } finally { try { FileUtils . deleteFileOrDirectory ( taskManagerConfigFile ) ; } catch ( IOException e ) { log . info ( "Could not delete temporary configuration file " + taskManagerConfigFile . getAbsolutePath ( ) + '.' , e ) ; } } } Map < String , LocalResource > taskManagerLocalResources = new HashMap < > ( ) ; taskManagerLocalResources . put ( "flink.jar" , flinkJar ) ; taskManagerLocalResources . put ( "flink-conf.yaml" , flinkConf ) ; // To support Yarn Secure Integration Test Scenario
if ( yarnConfResource != null && krb5ConfResource != null ) { taskManagerLocalResources . put ( YARN_SITE_FILE_NAME , yarnConfResource ) ; taskManagerLocalResources . put ( KRB5_FILE_NAME , krb5ConfResource ) ; } if ( keytabResource != null ) { taskManagerLocalResources . put ( KEYTAB_FILE_NAME , keytabResource ) ; } // prepare additional files to be shipped
for ( String pathStr : shipListString . split ( "," ) ) { if ( ! pathStr . isEmpty ( ) ) { String [ ] keyAndPath = pathStr . split ( "=" ) ; require ( keyAndPath . length == 2 , "Invalid entry in ship file list: %s" , pathStr ) ; Path path = new Path ( keyAndPath [ 1 ] ) ; LocalResource resource = registerLocalResource ( path . getFileSystem ( yarnConfig ) , path ) ; taskManagerLocalResources . put ( keyAndPath [ 0 ] , resource ) ; } } // now that all resources are prepared , we can create the launch context
log . info ( "Creating container launch context for TaskManagers" ) ; boolean hasLogback = new File ( workingDirectory , "logback.xml" ) . exists ( ) ; boolean hasLog4j = new File ( workingDirectory , "log4j.properties" ) . exists ( ) ; String launchCommand = BootstrapTools . getTaskManagerShellCommand ( flinkConfig , tmParams , "." , ApplicationConstants . LOG_DIR_EXPANSION_VAR , hasLogback , hasLog4j , hasKrb5 , taskManagerMainClass ) ; if ( log . isDebugEnabled ( ) ) { log . debug ( "Starting TaskManagers with command: " + launchCommand ) ; } else { log . info ( "Starting TaskManagers" ) ; } ContainerLaunchContext ctx = Records . newRecord ( ContainerLaunchContext . class ) ; ctx . setCommands ( Collections . singletonList ( launchCommand ) ) ; ctx . setLocalResources ( taskManagerLocalResources ) ; Map < String , String > containerEnv = new HashMap < > ( ) ; containerEnv . putAll ( tmParams . taskManagerEnv ( ) ) ; // add YARN classpath , etc to the container environment
containerEnv . put ( ENV_FLINK_CLASSPATH , classPathString ) ; setupYarnClassPath ( yarnConfig , containerEnv ) ; containerEnv . put ( YarnConfigKeys . ENV_HADOOP_USER_NAME , UserGroupInformation . getCurrentUser ( ) . getUserName ( ) ) ; if ( remoteKeytabPath != null && remoteKeytabPrincipal != null ) { containerEnv . put ( YarnConfigKeys . KEYTAB_PATH , remoteKeytabPath ) ; containerEnv . put ( YarnConfigKeys . KEYTAB_PRINCIPAL , remoteKeytabPrincipal ) ; } ctx . setEnvironment ( containerEnv ) ; // For TaskManager YARN container context , read the tokens from the jobmanager yarn container local file .
// NOTE : must read the tokens from the local file , not from the UGI context , because if UGI is login
// using Kerberos keytabs , there is no HDFS delegation token in the UGI context .
final String fileLocation = System . getenv ( UserGroupInformation . HADOOP_TOKEN_FILE_LOCATION ) ; if ( fileLocation != null ) { log . debug ( "Adding security tokens to TaskExecutor's container launch context." ) ; try ( DataOutputBuffer dob = new DataOutputBuffer ( ) ) { Method readTokenStorageFileMethod = Credentials . class . getMethod ( "readTokenStorageFile" , File . class , org . apache . hadoop . conf . Configuration . class ) ; Credentials cred = ( Credentials ) readTokenStorageFileMethod . invoke ( null , new File ( fileLocation ) , HadoopUtils . getHadoopConfiguration ( flinkConfig ) ) ; cred . writeTokenStorageToStream ( dob ) ; ByteBuffer securityTokens = ByteBuffer . wrap ( dob . getData ( ) , 0 , dob . getLength ( ) ) ; ctx . setTokens ( securityTokens ) ; } catch ( Throwable t ) { log . error ( "Failed to add Hadoop's security tokens." , t ) ; } } else { log . info ( "Could not set security tokens because Hadoop's token file location is unknown." ) ; } return ctx ; |
public class dospolicy { /** * Use this API to fetch all the dospolicy resources that are configured on netscaler . */
public static dospolicy [ ] get ( nitro_service service ) throws Exception { } } | dospolicy obj = new dospolicy ( ) ; dospolicy [ ] response = ( dospolicy [ ] ) obj . get_resources ( service ) ; return response ; |
public class CachedRemoteTable { /** * Get / Make this remote database session for this table session .
* @ param properties The client database properties ( Typically for transaction support ) . */
public RemoteDatabase getRemoteDatabase ( Map < String , Object > properties ) throws RemoteException { } } | return m_tableRemote . getRemoteDatabase ( properties ) ; |
public class ChannelCreateHandler { /** * Handles server text channel creation .
* @ param channel The channel data . */
private void handleServerTextChannel ( JsonNode channel ) { } } | long serverId = channel . get ( "guild_id" ) . asLong ( ) ; api . getPossiblyUnreadyServerById ( serverId ) . ifPresent ( server -> { ServerTextChannel textChannel = ( ( ServerImpl ) server ) . getOrCreateServerTextChannel ( channel ) ; ServerChannelCreateEvent event = new ServerChannelCreateEventImpl ( textChannel ) ; api . getEventDispatcher ( ) . dispatchServerChannelCreateEvent ( ( DispatchQueueSelector ) server , server , event ) ; } ) ; |
public class CompositeByteBuf { /** * Add the given { @ link ByteBuf } on the specific index and increase the { @ code writerIndex }
* if { @ code increaseWriterIndex } is { @ code true } .
* { @ link ByteBuf # release ( ) } ownership of { @ code buffer } is transferred to this { @ link CompositeByteBuf } .
* @ param cIndex the index on which the { @ link ByteBuf } will be added .
* @ param buffer the { @ link ByteBuf } to add . { @ link ByteBuf # release ( ) } ownership is transferred to this
* { @ link CompositeByteBuf } . */
public CompositeByteBuf addComponent ( boolean increaseWriterIndex , int cIndex , ByteBuf buffer ) { } } | checkNotNull ( buffer , "buffer" ) ; addComponent0 ( increaseWriterIndex , cIndex , buffer ) ; consolidateIfNeeded ( ) ; return this ; |
public class PropertyLoader { /** * Lookup object from JNDI
* @ param propertyKey property key
* @ param < T > JNDI object type
* @ return JNDI object */
@ SuppressWarnings ( "unchecked" ) private < T > T jndiLookup ( PropertyKey propertyKey ) { } } | String property = properties . getProperty ( propertyKey . getKey ( ) ) ; if ( property != null ) { return isJndiLazyLookup ( ) ? ( T ) LazyJndiResolver . newInstance ( property , DataSource . class ) : ( T ) JndiUtils . lookup ( property ) ; } return null ; |
public class Caster { /** * cast a Object to a int value ( primitive value type )
* @ param o Object to cast
* @ param defaultValue
* @ return casted int value */
public static int toIntValue ( Object o , int defaultValue ) { } } | if ( o instanceof Number ) return ( ( Number ) o ) . intValue ( ) ; else if ( o instanceof Boolean ) return ( ( Boolean ) o ) . booleanValue ( ) ? 1 : 0 ; else if ( o instanceof CharSequence ) return toIntValue ( o . toString ( ) . trim ( ) , defaultValue ) ; // else if ( o instanceof Clob ) return toIntValue ( toString ( o ) ) ;
else if ( o instanceof Character ) return ( int ) ( ( ( Character ) o ) . charValue ( ) ) ; else if ( o instanceof Castable ) { return ( int ) ( ( Castable ) o ) . castToDoubleValue ( defaultValue ) ; } else if ( o instanceof Date ) return ( int ) new DateTimeImpl ( ( Date ) o ) . castToDoubleValue ( ) ; else if ( o instanceof ObjectWrap ) return toIntValue ( ( ( ObjectWrap ) o ) . getEmbededObject ( Integer . valueOf ( defaultValue ) ) , defaultValue ) ; return defaultValue ; |
public class Pools { /** * Remove pool .
* @ param pool pool to remove */
public static void remove ( Pool pool ) { } } | if ( poolMap . containsKey ( pool . getUrlParser ( ) ) ) { synchronized ( poolMap ) { if ( poolMap . containsKey ( pool . getUrlParser ( ) ) ) { poolMap . remove ( pool . getUrlParser ( ) ) ; shutdownExecutor ( ) ; } } } |
public class AnnotatedString { /** * Return the appropriate mnemonic character for this string . If no mnemonic
* should be displayed , KeyEvent . VK _ UNDEFINED is returned .
* @ return the Mnemonic character , or VK _ UNDEFINED if no mnemonic should be
* set */
public int getMnemonic ( ) { } } | int mnemonic = KeyEvent . VK_UNDEFINED ; if ( ! MAC_OS_X ) { int index = getMnemonicIndex ( ) ; if ( ( index >= 0 ) && ( ( index + 1 ) < myAnnotatedString . length ( ) ) ) { mnemonic = Character . toUpperCase ( myAnnotatedString . charAt ( index + 1 ) ) ; } } return mnemonic ; |
public class ZipFile { /** * Closes the ZIP file .
* < p > Closing this ZIP file will close all of the input streams
* previously returned by invocations of the { @ link # getInputStream
* getInputStream } method .
* @ throws IOException if an I / O error has occurred */
public void close ( ) throws IOException { } } | if ( closeRequested ) return ; guard . close ( ) ; closeRequested = true ; synchronized ( this ) { // Close streams , release their inflaters
synchronized ( streams ) { if ( false == streams . isEmpty ( ) ) { Map < InputStream , Inflater > copy = new HashMap < > ( streams ) ; streams . clear ( ) ; for ( Map . Entry < InputStream , Inflater > e : copy . entrySet ( ) ) { e . getKey ( ) . close ( ) ; Inflater inf = e . getValue ( ) ; if ( inf != null ) { inf . end ( ) ; } } } } // Release cached inflaters
Inflater inf ; synchronized ( inflaterCache ) { while ( null != ( inf = inflaterCache . poll ( ) ) ) { inf . end ( ) ; } } if ( jzfile != 0 ) { // Close the zip file
long zf = this . jzfile ; jzfile = 0 ; close ( zf ) ; } // Android - changed , explicit delete for OPEN _ DELETE ZipFile .
if ( fileToRemoveOnClose != null ) { fileToRemoveOnClose . delete ( ) ; } } |
public class VersionEdit { /** * REQUIRES : " smallest " and " largest " are smallest and largest keys in file */
public void addFile ( int level , long fileNumber , long fileSize , InternalKey smallest , InternalKey largest ) { } } | FileMetaData fileMetaData = new FileMetaData ( fileNumber , fileSize , smallest , largest ) ; addFile ( level , fileMetaData ) ; |
public class EmvParser { /** * Read public card data from parameter AID
* @ param pApplication
* application data
* @ return true if succeed false otherwise
* @ throws CommunicationException communication error */
protected boolean extractPublicData ( final Application pApplication ) throws CommunicationException { } } | boolean ret = false ; // Select AID
byte [ ] data = selectAID ( pApplication . getAid ( ) ) ; // check response
// Add SW _ 6285 to fix Interact issue
if ( ResponseUtils . contains ( data , SwEnum . SW_9000 , SwEnum . SW_6285 ) ) { // Update reading state
pApplication . setReadingStep ( ApplicationStepEnum . SELECTED ) ; // Parse select response
ret = parse ( data , pApplication ) ; if ( ret ) { // Get AID
String aid = BytesUtils . bytesToStringNoSpace ( TlvUtil . getValue ( data , EmvTags . DEDICATED_FILE_NAME ) ) ; String applicationLabel = extractApplicationLabel ( data ) ; if ( applicationLabel == null ) { applicationLabel = pApplication . getApplicationLabel ( ) ; } if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "Application label:" + applicationLabel + " with Aid:" + aid ) ; } template . get ( ) . getCard ( ) . setType ( findCardScheme ( aid , template . get ( ) . getCard ( ) . getCardNumber ( ) ) ) ; pApplication . setAid ( BytesUtils . fromString ( aid ) ) ; pApplication . setApplicationLabel ( applicationLabel ) ; pApplication . setLeftPinTry ( getLeftPinTry ( ) ) ; pApplication . setTransactionCounter ( getTransactionCounter ( ) ) ; template . get ( ) . getCard ( ) . setState ( CardStateEnum . ACTIVE ) ; } } return ret ; |
public class FileUtils { /** * 遍历指定文件夹下所有文件
* @ param file 文件夹
* @ return 指定文件夹下所有文件 ( 不包括文件夹 , 只有文件 , 如果给定的不是文件夹而是文件的话将会返回文件本身 ) */
public static List < File > findAllFile ( File file ) { } } | if ( file == null || ! file . exists ( ) ) { return Collections . emptyList ( ) ; } if ( file . isFile ( ) ) { return Collections . singletonList ( file ) ; } List < File > fileList = new ArrayList < > ( ) ; File [ ] files = file . listFiles ( ) ; for ( File f : files ) { fileList . addAll ( findAllFile ( f ) ) ; } return fileList ; |
public class BaseMoskitoUIAction { /** * Sets an info message that can be shown on next screen . The info message is readable exactly once .
* @ param message the info message . */
protected void setInfoMessage ( String message ) { } } | try { APICallContext . getCallContext ( ) . getCurrentSession ( ) . setAttribute ( "infoMessage" , APISession . POLICY_FLASH , message ) ; } catch ( NullPointerException e ) { log . error ( "Can't set info message (flash) due" , e ) ; log . error ( "APICallContext: " + APICallContext . getCallContext ( ) ) ; if ( APICallContext . getCallContext ( ) != null ) { log . error ( "Current Session: " + APICallContext . getCallContext ( ) . getCurrentSession ( ) ) ; } } |
public class RedisTransporter { /** * - - - SUBSCRIBE - - - */
@ Override public Promise subscribe ( String channel ) { } } | if ( status . get ( ) == STATUS_CONNECTED ) { return clientSub . subscribe ( channel ) ; } return Promise . resolve ( ) ; |
public class PermitMonitor { private void initMainStemStatus ( ) { } } | String path = null ; try { path = StagePathUtils . getMainStem ( getPipelineId ( ) ) ; byte [ ] bytes = zookeeper . readData ( path ) ; initMainStemStatus ( bytes ) ; } catch ( ZkNoNodeException e ) { // mainstem节点挂了后 , 状态直接修改为taking
mainStemStatus = MainStemEventData . Status . TAKEING ; permitSem ( ) ; } catch ( ZkException e ) { logger . error ( path , e ) ; } |
public class DB_PostgreSQL { /** * Remapping json directly to object as opposed to traversing the tree
* @ param recordValue
* @ return
* @ throws JsonParseException
* @ throws JsonMappingException
* @ throws IOException */
private Object jsonToObject ( String recordValue ) throws JsonParseException , JsonMappingException , IOException { } } | ObjectMapper mapper = new ObjectMapper ( ) ; Object json = mapper . readValue ( recordValue , Object . class ) ; return json ; |
public class WsException { /** * Initialize the cause field for this WsException to the specified value .
* The cause is the Throwable that caused this WsException to get thrown .
* This method can be called at most once . It is generally called from within a constructor that takes a Throwable , or immediately after constructing this object with a
* constructor that does not accept a \ Throwable . Thus , if a constructor that takes Throwable as a parameter is used to construct this object , it cannot be called at all .
* @ param cause
* the Throwable which caused this WsException to be thrown .
* Null is tolerated .
* @ return a reference to this < code > Throwable < / code > instance .
* @ exception IllegalArgumentException
* if the specified cause is this
* WsException . An exception cannot be its own cause .
* @ exception IllegalStateException
* if this WsException was created with a
* constructor that specified a cause , or this method has already
* been
* called on this object . */
public synchronized Throwable initCause ( Throwable cause ) throws IllegalStateException , IllegalArgumentException { } } | super . initCause ( cause ) ; ivCause = cause ; causeInitialized = true ; return this ; |
public class Driver { /** * Unregisters specified handler .
* @ param id Handler ID
* @ return Handler , or null if none
* @ see # register */
public static ConnectionHandler unregister ( final String id ) { } } | if ( id == null || id . length ( ) == 0 ) { return null ; // Not possible
} // end of if
return handlers . remove ( id ) ; |
public class Template { /** * Debug this template .
* @ param vars
* @ param out
* @ param listener
* @ return Context
* @ throws ScriptRuntimeException
* @ throws ParseException */
public Context debug ( final Vars vars , final Out out , final BreakpointListener listener ) { } } | try { return Parser . parse ( this , listener ) . execute ( this , out , vars ) ; } catch ( Exception e ) { throw completeException ( e ) ; } |
public class ByteUtils { /** * Copy the specified bytes into a new array
* @ param array The array to copy from
* @ param from The index in the array to begin copying from
* @ param to The least index not copied
* @ return A new byte [ ] containing the copied bytes */
public static byte [ ] copy ( byte [ ] array , int from , int to ) { } } | if ( to - from < 0 ) { return new byte [ 0 ] ; } else { byte [ ] a = new byte [ to - from ] ; System . arraycopy ( array , from , a , 0 , to - from ) ; return a ; } |
public class nstcpparam { /** * Use this API to fetch all the nstcpparam resources that are configured on netscaler . */
public static nstcpparam get ( nitro_service service ) throws Exception { } } | nstcpparam obj = new nstcpparam ( ) ; nstcpparam [ ] response = ( nstcpparam [ ] ) obj . get_resources ( service ) ; return response [ 0 ] ; |
public class RunWindupCommand { private String getOptionName ( String argument ) { } } | if ( argument == null ) return null ; else if ( argument . startsWith ( "--" ) ) return argument . substring ( 2 ) ; else if ( argument . startsWith ( "-" ) ) return argument . substring ( 1 ) ; else return null ; |
public class GvmCluster { /** * Sets this cluster equal to the specified cluster
* @ param cluster a cluster , not this or null */
void set ( GvmCluster < S , K > cluster ) { } } | if ( cluster == this ) throw new IllegalArgumentException ( "cannot set cluster to itself" ) ; m0 = cluster . m0 ; clusters . space . setTo ( m1 , cluster . m1 ) ; clusters . space . setTo ( m2 , cluster . m2 ) ; var = cluster . var ; |
public class EscapeTool { /** * We override the implementation so that we sync it with the encoding strategy we use for generating URLs . Namely
* we encode all characters and we encode space as { @ code % 20 } and not as { @ code + } in the query string .
* @ param string the url to encode
* @ return the encoded URL
* @ since 8.3M1 */
@ Override public String url ( Object string ) { } } | // TODO : Introduce a xwiki - commons - url module and move this code in it so that we can share it with
// platform ' s XWikiServletURLFactory and functional test TestUtils class .
String encodedURL = null ; if ( string != null ) { try { encodedURL = URLEncoder . encode ( String . valueOf ( string ) , "UTF-8" ) ; } catch ( UnsupportedEncodingException e ) { // Should not happen ( UTF - 8 is always available )
throw new RuntimeException ( "Missing charset [UTF-8]" , e ) ; } // The previous call will convert " " into " + " ( and " + " into " % 2B " ) so we need to convert " + " into " % 20"
// It ' s ok since % 20 is allowed in both the URL path and the query string ( and anchor ) .
encodedURL = encodedURL . replaceAll ( "\\+" , "%20" ) ; } return encodedURL ; |
public class ClassIntrospectorImpl { /** * Returns the first constructor found that matches the filter parameter .
* @ param filter Filter to apply .
* @ return the first constructor found that matches the filter parameter .
* @ throws IllegalArgumentException if no constructor is found matching the filter . */
@ Override public Set < Constructor < ? > > getConstructors ( ConstructorFilter filter ) throws IllegalArgumentException { } } | return ConstructorUtils . getConstructors ( target , filter ) ; |
public class ClusterScanSupport { /** * Retrieve a list of node Ids to use for the SCAN operation .
* @ param connection
* @ return */
private static List < String > getNodeIds ( StatefulRedisClusterConnection < ? , ? > connection ) { } } | List < String > nodeIds = new ArrayList < > ( ) ; PartitionAccessor partitionAccessor = new PartitionAccessor ( connection . getPartitions ( ) ) ; for ( RedisClusterNode redisClusterNode : partitionAccessor . getMasters ( ) ) { if ( connection . getReadFrom ( ) != null ) { List < RedisNodeDescription > readCandidates = ( List ) partitionAccessor . getReadCandidates ( redisClusterNode ) ; List < RedisNodeDescription > selection = connection . getReadFrom ( ) . select ( new ReadFrom . Nodes ( ) { @ Override public List < RedisNodeDescription > getNodes ( ) { return readCandidates ; } @ Override public Iterator < RedisNodeDescription > iterator ( ) { return readCandidates . iterator ( ) ; } } ) ; if ( ! selection . isEmpty ( ) ) { RedisClusterNode selectedNode = ( RedisClusterNode ) selection . get ( 0 ) ; nodeIds . add ( selectedNode . getNodeId ( ) ) ; continue ; } } nodeIds . add ( redisClusterNode . getNodeId ( ) ) ; } return nodeIds ; |
public class NameNode { /** * The client needs to give up on the block . */
public void abandonBlock ( Block b , String src , String holder ) throws IOException { } } | abandonBlockInternal ( b , src , holder ) ; |
public class JSONUtil { /** * 设置表达式指定位置 ( 或filed对应 ) 的值 < br >
* 若表达式指向一个JSONArray则设置其坐标对应位置的值 , 若指向JSONObject则put对应key的值 < br >
* 注意 : 如果为JSONArray , 则设置值得下标不能大于已有JSONArray的长度 < br >
* < ol >
* < li > . 表达式 , 可以获取Bean对象中的属性 ( 字段 ) 值或者Map中key对应的值 < / li >
* < li > [ ] 表达式 , 可以获取集合等对象中对应index的值 < / li >
* < / ol >
* 表达式栗子 :
* < pre >
* persion
* persion . name
* persons [ 3]
* person . friends [ 5 ] . name
* < / pre >
* @ param json JSON , 可以为JSONObject或JSONArray
* @ param expression 表达式
* @ param value 值 */
public static void putByPath ( JSON json , String expression , Object value ) { } } | json . putByPath ( expression , value ) ; |
public class AnonymousPersistentField { /** * Use ReferenceIdentityMap ( with weak key and hard value setting ) instead of
* WeakHashMap to hold anonymous field values . Here is an snip of the mail from Andy Malakov :
* < snip >
* I found that usage of database identity in Java produces quite interesting problem in OJB :
* In my application all persistent Java objects use database identity instead of Java reference identity
* ( i . e . Persistable . equals ( ) is redefined so that two persistent objects are the same if they have the same
* primary key and top - level class ) .
* In OJB , for each field declared in repository there is dedicated instance of AnonymousPersistentField that stores
* object - to - field - value mapping in WeakHashMap ( in fkCache attribute ) . Despite usage of cache
* ( ObjectCachePerBrokerImpl in my case ) it is possible that identical DB objects will end up as different
* Java objects during retrieval of complex objects .
* Now imagine what happens when two identical instances are retrieved :
* When first instance is retrieved it stores its foreign keys in AnonymousPersistentField . fkCache under instance ' s
* identity . ( happens in RowReaderDefaultImpl . buildWithReflection ( ) )
* When second object is retrieved and stored in fkCache , first instance is probably still cached
* [ WeakHashMap entries are cleaned up only during GC ] . Since keys are identical WeakHashMap only updates entry
* value and DOES NOT update entry key .
* If Full GC happens after that moment it will dispose fcCache entry if the FIRST reference becomes
* soft - referenced only .
* < / snip > */
protected void putToFieldCache ( Object key , Object value ) { } } | if ( key != null ) { if ( fkCache == null ) { fkCache = new ReferenceIdentityMap ( ReferenceIdentityMap . WEAK , ReferenceIdentityMap . HARD , true ) ; } if ( value != null ) fkCache . put ( key , value ) ; else fkCache . remove ( key ) ; } |
public class DoubleBinaryMatrix { /** * Composes determinant by using permutations
* @ return */
public DoubleBinaryOperator determinant ( ) { } } | int sign = 1 ; SumBuilder sum = DoubleBinaryOperators . sumBuilder ( ) ; PermutationMatrix pm = PermutationMatrix . getInstance ( rows ) ; int perms = pm . rows ; for ( int p = 0 ; p < perms ; p ++ ) { MultiplyBuilder mul = DoubleBinaryOperators . multiplyBuilder ( ) ; for ( int i = 0 ; i < rows ; i ++ ) { mul . add ( get ( i , pm . get ( p , i ) ) ) ; } sum . add ( DoubleBinaryOperators . sign ( sign , mul . build ( ) ) ) ; sign = - sign ; } return sum . build ( ) ; |
public class JobInstanceSqlMapDao { /** * this will be called from Job Status Listener when Job is completed . */
public void deleteJobPlanAssociatedEntities ( JobInstance job ) { } } | JobPlan jobPlan = loadPlan ( job . getId ( ) ) ; environmentVariableDao . deleteAll ( jobPlan . getVariables ( ) ) ; artifactPlanRepository . deleteAll ( jobPlan . getArtifactPlansOfType ( ArtifactPlanType . file ) ) ; artifactPropertiesGeneratorRepository . deleteAll ( jobPlan . getPropertyGenerators ( ) ) ; resourceRepository . deleteAll ( jobPlan . getResources ( ) ) ; if ( jobPlan . requiresElasticAgent ( ) ) { jobAgentMetadataDao . delete ( jobAgentMetadataDao . load ( jobPlan . getJobId ( ) ) ) ; } |
public class URLImageInputStream { /** * Get input stream for the image through http connection . */
private void setupInputStream ( ) throws IOException { } } | HttpURLConnection connection = ( HttpURLConnection ) url . openConnection ( ) ; // set timeout for connecting and reading
connection . setConnectTimeout ( httpTimeout ) ; connection . setReadTimeout ( httpTimeout ) ; if ( connection . getResponseCode ( ) != HttpURLConnection . HTTP_OK ) { throw new IOException ( "Fetch of " + url + " failed with status code " + connection . getResponseCode ( ) + "\nResponse message:\n" + connection . getResponseMessage ( ) ) ; } String contentLength = connection . getHeaderField ( TransferFsImage . CONTENT_LENGTH ) ; if ( contentLength != null ) { // store image size
advertisedSize = Long . parseLong ( contentLength ) ; if ( advertisedSize <= 0 ) { throw new IOException ( "Invalid " + TransferFsImage . CONTENT_LENGTH + " header: " + contentLength ) ; } } else { throw new IOException ( TransferFsImage . CONTENT_LENGTH + " header is not provided " + "by the server when trying to fetch " + url ) ; } // get the digest
digest = TransferFsImage . parseMD5Header ( connection ) ; if ( digest == null ) { // digest must be provided , otherwise the image is not valid
throw new IOException ( "Image digest not provided for url: " + url ) ; } // get the input stream directly from the connection
inputStream = connection . getInputStream ( ) ; initialized = true ; |
public class CronSequenceGenerator { /** * Gets the range .
* @ param field the field
* @ param min the min
* @ param max the max
* @ return the range */
private int [ ] getRange ( String field , int min , int max ) { } } | int [ ] result = new int [ 2 ] ; if ( field . contains ( "*" ) ) { result [ 0 ] = min ; result [ 1 ] = max - 1 ; return result ; } if ( ! field . contains ( "-" ) ) { result [ 0 ] = result [ 1 ] = Integer . valueOf ( field ) ; } else { String [ ] split = StringUtils . delimitedListToStringArray ( field , "-" ) ; if ( split . length > 2 ) { throw new IllegalArgumentException ( "Range has more than two fields: '" + field + "' in expression \"" + this . expression + "\"" ) ; } result [ 0 ] = Integer . valueOf ( split [ 0 ] ) ; result [ 1 ] = Integer . valueOf ( split [ 1 ] ) ; } if ( result [ 0 ] >= max || result [ 1 ] >= max ) { throw new IllegalArgumentException ( "Range exceeds maximum (" + max + "): '" + field + "' in expression \"" + this . expression + "\"" ) ; } if ( result [ 0 ] < min || result [ 1 ] < min ) { throw new IllegalArgumentException ( "Range less than minimum (" + min + "): '" + field + "' in expression \"" + this . expression + "\"" ) ; } return result ; |
public class ClassLoaderUtil { /** * Returns a fallback class loader .
* @ return A class loader */
public static ClassLoader getClassLoader ( ) { } } | ClassLoader cl = Thread . currentThread ( ) . getContextClassLoader ( ) ; if ( cl == null ) { cl = ClassLoader . getSystemClassLoader ( ) ; if ( cl == null ) { // When this method is called for initializing a ICU4J class
// during bootstrap , cl might be still null ( other than Android ? ) .
// In this case , we want to use the bootstrap class loader .
cl = getBootstrapClassLoader ( ) ; } } return cl ; |
public class MovingAverageIterable { /** * / * ( non - Javadoc )
* @ see java . lang . Iterable # iterator ( ) */
@ Override public Iterator < Row > iterator ( ) { } } | return new MovingAverageIterator ( seq , dims , factories , emptyEvents , aggMap ) ; |
public class InstrumentedScheduledExecutorService { /** * { @ inheritDoc } */
@ Override public void execute ( Runnable command ) { } } | submitted . mark ( ) ; delegate . execute ( new InstrumentedRunnable ( command ) ) ; |
public class BuilderDefaults { /** * Returns an empty map if the input is null
* @ param newValue - nullable map value
* @ param < K > - map key
* @ param < V > - map value
* @ return non - null Map */
public static < K , V > Map < K , V > nullToEmptyMap ( Map < K , V > newValue ) { } } | if ( newValue == null ) { return new HashMap < > ( ) ; } return newValue ; |
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public String convertIfcPileConstructionEnumToString ( EDataType eDataType , Object instanceValue ) { } } | return instanceValue == null ? null : instanceValue . toString ( ) ; |
public class GeometryUtil { /** * Returns the geometric center of all the atoms in the atomContainer . See comment for
* center ( IAtomContainer atomCon , Dimension areaDim , HashMap renderingCoordinates ) for details
* on coordinate sets
* @ param container Description of the Parameter
* @ return the geometric center of the atoms in this atomContainer */
public static Point2d get2DCenter ( IAtomContainer container ) { } } | double centerX = 0 ; double centerY = 0 ; double counter = 0 ; for ( IAtom atom : container . atoms ( ) ) { if ( atom . getPoint2d ( ) != null ) { centerX += atom . getPoint2d ( ) . x ; centerY += atom . getPoint2d ( ) . y ; counter ++ ; } } return new Point2d ( centerX / ( counter ) , centerY / ( counter ) ) ; |
public class NetworkFragment { /** * < p > Registers a { @ link BroadcastReceiver } to listen for changes in the data
* connection state and invoke the appropriate callbacks . */
@ Override public void onResume ( ) { } } | super . onResume ( ) ; if ( ProfileService . getInstance ( getActivity ( ) . getApplicationContext ( ) ) . isActive ( this , Profile . NETWORK ) ) { if ( ! PermissionUtils . isGranted ( this , Manifest . permission . ACCESS_NETWORK_STATE ) ) { Log . e ( getClass ( ) . getSimpleName ( ) , "Failed to register a receiver for changes in network state. " , new IckleBotRuntimeException ( new PermissionDeniedException ( Manifest . permission . ACCESS_NETWORK_STATE , Profile . NETWORK ) ) ) ; } else { IntentFilter intentFilter = new IntentFilter ( ) ; intentFilter . addAction ( ConnectivityManager . CONNECTIVITY_ACTION ) ; getActivity ( ) . registerReceiver ( networkStateReceiver , intentFilter ) ; } } |
public class UserDataManager { /** * Get the file system resource of the passed UDO object .
* @ param aUDO
* The UDO object to get the resource from .
* @ return The matching file system resource . No check is performed , whether
* the resource exists or not ! */
@ Nonnull public static FileSystemResource getResource ( @ Nonnull final IUserDataObject aUDO ) { } } | ValueEnforcer . notNull ( aUDO , "UDO" ) ; return _getFileIO ( ) . getResource ( getUserDataPath ( ) + aUDO . getPath ( ) ) ; |
public class OSUtils { /** * Kill the process .
* @ param pid Process id
* @ throws IOException */
public static void kill ( final long pid ) throws IOException , InterruptedException { } } | if ( isUnix ( ) ) { final Process process = new ProcessBuilder ( ) . command ( "bash" , "-c" , "kill" , "-9" , String . valueOf ( pid ) ) . start ( ) ; final int returnCode = process . waitFor ( ) ; LOG . fine ( "Kill returned: " + returnCode ) ; } else if ( isWindows ( ) ) { final Process process = new ProcessBuilder ( ) . command ( "taskkill.exe" , "/f" , "/pid" , String . valueOf ( pid ) ) . start ( ) ; final int returnCode = process . waitFor ( ) ; LOG . fine ( "Kill returned: " + returnCode ) ; } else { throw new UnsupportedOperationException ( "Unable to execute kill on unknown OS" ) ; } |
public class MultiViewOps { /** * Creates a trifocal tensor from three camera matrices . The
* Page 415 in R . Hartley , and A . Zisserman , " Multiple View Geometry in Computer Vision " , 2nd Ed , Cambridge 2003
* @ param P2 Camera matrix for view 1 . 3x4 matrix
* @ param P2 Camera matrix for view 2 . 3x4 matrix
* @ param P3 Camera matrix for view 3 . 3x4 matrix
* @ param ret Storage for trifocal tensor . If null a new instance will be created .
* @ return The trifocal tensor */
public static TrifocalTensor createTrifocal ( DMatrixRMaj P1 , DMatrixRMaj P2 , DMatrixRMaj P3 , @ Nullable TrifocalTensor ret ) { } } | if ( ret == null ) ret = new TrifocalTensor ( ) ; // invariant to scale . So pick something more reasonable and maybe reduce overflow
double scale = 0 ; scale = Math . max ( scale , CommonOps_DDRM . elementMaxAbs ( P1 ) ) ; scale = Math . max ( scale , CommonOps_DDRM . elementMaxAbs ( P2 ) ) ; scale = Math . max ( scale , CommonOps_DDRM . elementMaxAbs ( P3 ) ) ; DMatrixRMaj A = new DMatrixRMaj ( 4 , 4 ) ; double sign = 1 ; for ( int i = 0 ; i < 3 ; i ++ ) { DMatrixRMaj T = ret . getT ( i ) ; for ( int row = 0 , cnt = 0 ; row < 3 ; row ++ ) { if ( row != i ) { CommonOps_DDRM . extract ( P1 , row , row + 1 , 0 , 4 , A , cnt , 0 ) ; for ( int col = 0 ; col < 4 ; col ++ ) { A . data [ cnt * 4 + col ] /= scale ; } cnt ++ ; } } for ( int q = 0 ; q < 3 ; q ++ ) { CommonOps_DDRM . extract ( P2 , q , q + 1 , 0 , 4 , A , 2 , 0 ) ; for ( int col = 0 ; col < 4 ; col ++ ) { A . data [ 2 * 4 + col ] /= scale ; } for ( int r = 0 ; r < 3 ; r ++ ) { CommonOps_DDRM . extract ( P3 , r , r + 1 , 0 , 4 , A , 3 , 0 ) ; for ( int col = 0 ; col < 4 ; col ++ ) { A . data [ 3 * 4 + col ] /= scale ; } double v = CommonOps_DDRM . det ( A ) ; T . set ( q , r , sign * v * scale ) ; // scale is to the power of 2 , hence the * scale here
} } sign *= - 1 ; } return ret ; |
public class DateTimeFormatter { /** * Prints a ReadableInstant to a String .
* This method will use the override zone and the override chronology if
* they are set . Otherwise it will use the chronology and zone of the instant .
* @ param instant instant to format , null means now
* @ return the printed result */
public String print ( ReadableInstant instant ) { } } | StringBuilder buf = new StringBuilder ( requirePrinter ( ) . estimatePrintedLength ( ) ) ; try { printTo ( ( Appendable ) buf , instant ) ; } catch ( IOException ex ) { // StringBuilder does not throw IOException
} return buf . toString ( ) ; |
public class Checker { /** * < editor - fold defaultstate = " collapsed " desc = " HTML elements " > */
@ Override @ DefinedBy ( Api . COMPILER_TREE ) public Void visitStartElement ( StartElementTree tree , Void ignore ) { } } | final Name treeName = tree . getName ( ) ; final HtmlTag t = HtmlTag . get ( treeName ) ; if ( t == null ) { env . messages . error ( HTML , tree , "dc.tag.unknown" , treeName ) ; } else if ( t . allowedVersion != HtmlVersion . ALL && t . allowedVersion != env . htmlVersion ) { env . messages . error ( HTML , tree , "dc.tag.not.supported" , treeName ) ; } else { boolean done = false ; for ( TagStackItem tsi : tagStack ) { if ( tsi . tag . accepts ( t ) ) { while ( tagStack . peek ( ) != tsi ) { warnIfEmpty ( tagStack . peek ( ) , null ) ; tagStack . pop ( ) ; } done = true ; break ; } else if ( tsi . tag . endKind != HtmlTag . EndKind . OPTIONAL ) { done = true ; break ; } } if ( ! done && HtmlTag . BODY . accepts ( t ) ) { while ( ! tagStack . isEmpty ( ) ) { warnIfEmpty ( tagStack . peek ( ) , null ) ; tagStack . pop ( ) ; } } markEnclosingTag ( Flag . HAS_ELEMENT ) ; checkStructure ( tree , t ) ; // tag specific checks
switch ( t ) { // check for out of sequence headers , such as < h1 > . . . < / h1 > < h3 > . . . < / h3 >
case H1 : case H2 : case H3 : case H4 : case H5 : case H6 : checkHeader ( tree , t ) ; break ; } if ( t . flags . contains ( HtmlTag . Flag . NO_NEST ) ) { for ( TagStackItem i : tagStack ) { if ( t == i . tag ) { env . messages . warning ( HTML , tree , "dc.tag.nested.not.allowed" , treeName ) ; break ; } } } } // check for self closing tags , such as < a id = " name " / >
if ( tree . isSelfClosing ( ) ) { env . messages . error ( HTML , tree , "dc.tag.self.closing" , treeName ) ; } try { TagStackItem parent = tagStack . peek ( ) ; TagStackItem top = new TagStackItem ( tree , t ) ; tagStack . push ( top ) ; super . visitStartElement ( tree , ignore ) ; // handle attributes that may or may not have been found in start element
if ( t != null ) { switch ( t ) { case CAPTION : if ( parent != null && parent . tag == HtmlTag . TABLE ) parent . flags . add ( Flag . TABLE_HAS_CAPTION ) ; break ; case H1 : case H2 : case H3 : case H4 : case H5 : case H6 : if ( parent != null && ( parent . tag == HtmlTag . SECTION || parent . tag == HtmlTag . ARTICLE ) ) { parent . flags . add ( Flag . HAS_HEADING ) ; } break ; case IMG : if ( ! top . attrs . contains ( HtmlTag . Attr . ALT ) ) env . messages . error ( ACCESSIBILITY , tree , "dc.no.alt.attr.for.image" ) ; break ; } } return null ; } finally { if ( t == null || t . endKind == HtmlTag . EndKind . NONE ) tagStack . pop ( ) ; } |
public class InMemoryPartition { /** * UNSAFE ! ! overwrites record
* causes inconsistency or data loss for overwriting everything but records of the exact same size
* @ param pointer pointer to start of record
* @ param record record to overwrite old one with
* @ throws IOException
* @ deprecated Don ' t use this , overwrites record and causes inconsistency or data loss for
* overwriting everything but records of the exact same size */
@ Deprecated public void overwriteRecordAt ( long pointer , T record ) throws IOException { } } | long tmpPointer = this . writeView . getCurrentPointer ( ) ; this . writeView . resetTo ( pointer ) ; this . serializer . serialize ( record , this . writeView ) ; this . writeView . resetTo ( tmpPointer ) ; |
public class LessObject { /** * If cause is already a LessException then filename , line number and column of the current object are added to the less stacktrace .
* With any other type of exception a new LessException is created .
* @ param cause the cause
* @ return the exception */
@ Nonnull LessException createException ( Throwable cause ) { } } | LessException lessEx = cause . getClass ( ) == LessException . class ? ( LessException ) cause : new LessException ( cause ) ; lessEx . addPosition ( filename , line , column ) ; return lessEx ; |
public class ULocale { /** * < strong > [ icu ] < / strong > Returns a keyword value localized for display in the specified locale .
* This is a cover for the ICU4C API .
* @ param localeID the id of the locale whose keyword value is to be displayed .
* @ param keyword the keyword whose value is to be displayed .
* @ param displayLocale the id of the locale in which to display the value .
* @ return the localized value name . */
public static String getDisplayKeywordValue ( String localeID , String keyword , ULocale displayLocale ) { } } | return getDisplayKeywordValueInternal ( new ULocale ( localeID ) , keyword , displayLocale ) ; |
public class HadoopLocationWizard { /** * Performs any actions appropriate in response to the user having pressed
* the Finish button , or refuse if finishing now is not permitted .
* @ return the created or updated Hadoop location */
public HadoopServer performFinish ( ) { } } | try { if ( this . original == null ) { // New location
Display . getDefault ( ) . syncExec ( new Runnable ( ) { public void run ( ) { ServerRegistry . getInstance ( ) . addServer ( HadoopLocationWizard . this . location ) ; } } ) ; return this . location ; } else { // Update location
final String originalName = this . original . getLocationName ( ) ; this . original . load ( this . location ) ; Display . getDefault ( ) . syncExec ( new Runnable ( ) { public void run ( ) { ServerRegistry . getInstance ( ) . updateServer ( originalName , HadoopLocationWizard . this . location ) ; } } ) ; return this . original ; } } catch ( Exception e ) { e . printStackTrace ( ) ; setMessage ( "Invalid server location values" , IMessageProvider . ERROR ) ; return null ; } |
public class Pipes { /** * Extact one value from the selected pipe or an error if it doesn ' t exist ( NoSuchElementException ) .
* < pre >
* { @ code
* Queue < String > q = new Queue < > ( ) ;
* pipes . register ( " hello " , q ) ;
* pipes . push ( " hello " , " world " ) ;
* pipes . push ( " hello " , " world2 " ) ;
* pipes . oneOrError ( " hello " )
* . getValue ( ) / / " world "
* < / pre >
* @ param key : Adapter identifier
* @ return Xor containing lazy a NoSuchElementException an Adapter with the specified key does not exist ,
* or the next value from that Adapter */
public Either < Throwable , V > oneOrError ( final K key ) { } } | final ValueSubscriber < V > sub = ValueSubscriber . subscriber ( ) ; return get ( key ) . peek ( a -> a . stream ( ) . subscribe ( sub ) ) . map ( a -> sub . toEither ( ) ) . orElse ( Either . left ( new NoSuchElementException ( "no adapter for key " + key ) ) ) ; |
public class AbstractIoService { /** * { @ inheritDoc } */
@ Override public final void setHandler ( org . apache . mina . core . service . IoHandler handler ) { } } | if ( handler == null ) { throw new NullPointerException ( "handler cannot be null" ) ; } if ( isActive ( ) ) { throw new IllegalStateException ( "handler cannot be set while the service is active." ) ; } this . handler = handler ; |
public class PasswordUtil { /** * Encode the provided password by using the default encoding algorithm . The encoded string consists of the algorithm of the encoding and the encoded value .
* For example , { xor } CDo9Hgw = .
* If the decoded _ string is already encoded , the string will be decoded and then encoded by using the default encoding algorithm .
* Use this method for encoding the string by using the default encoding algorithm .
* @ param decoded _ string the string to be encoded .
* @ return The encoded string .
* @ throws InvalidPasswordEncodingException If the decoded _ string is null or invalid . Or the encoded _ string is null .
* @ throws UnsupportedCryptoAlgorithmException If the algorithm is not supported . */
public static String encode ( String decoded_string ) throws InvalidPasswordEncodingException , UnsupportedCryptoAlgorithmException { } } | return encode ( decoded_string , PasswordCipherUtil . getSupportedCryptoAlgorithms ( ) [ 0 ] , ( String ) null ) ; |
public class RamlControllerVisitor { /** * A method normalizing " resource " path . In RAML resource path must neither be empty ( " / " is used in this case ) ,
* not ends with " / " ( as all uri must start with " / " ) .
* @ param uri the uri to normalized
* @ return the normalized path */
private String normalizeParentPath ( String uri ) { } } | String relativeUri = extractRelativeUrl ( uri , null ) ; if ( relativeUri . endsWith ( "/" ) && relativeUri . length ( ) != 1 ) { relativeUri = StringUtils . removeEndIgnoreCase ( relativeUri , "/" ) ; } return relativeUri ; |
public class PrefsInterface { /** * Returns the default hidden preference for the user .
* @ return boolean hidden or not
* @ throws FlickrException */
public boolean getHidden ( ) throws FlickrException { } } | Map < String , Object > parameters = new HashMap < String , Object > ( ) ; parameters . put ( "method" , METHOD_GET_HIDDEN ) ; Response response = transportAPI . get ( transportAPI . getPath ( ) , parameters , apiKey , sharedSecret ) ; if ( response . isError ( ) ) { throw new FlickrException ( response . getErrorCode ( ) , response . getErrorMessage ( ) ) ; } Element personElement = response . getPayload ( ) ; return personElement . getAttribute ( "hidden" ) . equals ( "1" ) ? true : false ; |
public class SqlLoaderImpl { /** * キーが存在する場合は上書きを行わずに指定されたマップのすべてのマッピングをこのマップにコピーします
* @ param baseMap マージ先の対象データ ( マップの内容が更新されます )
* @ param map コピーの対象データ */
private void putAllIfAbsent ( final Map < String , String > baseMap , final Map < String , String > map ) { } } | for ( Map . Entry < String , String > entry : map . entrySet ( ) ) { if ( ! baseMap . containsKey ( entry . getKey ( ) ) ) { baseMap . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } } |
public class JodaBeanReferencingBinReader { /** * parses the references */
private void parseClassDescriptions ( ) throws Exception { } } | int refCount = acceptInteger ( input . readByte ( ) ) ; if ( refCount < 0 ) { throw new IllegalArgumentException ( "Invalid binary data: Expected count of references, but was: " + refCount ) ; } refs = new Object [ refCount ] ; int classMapSize = acceptMap ( input . readByte ( ) ) ; classes = new ClassInfo [ classMapSize ] ; // Guaranteed non - negative by acceptMap ( )
classMap = new HashMap < > ( classMapSize ) ; for ( int position = 0 ; position < classMapSize ; position ++ ) { ClassInfo classInfo = parseClassInfo ( ) ; classes [ position ] = classInfo ; classMap . put ( classInfo . type , classInfo ) ; } |
public class CmsContentService { /** * Returns the RDF annotations required for in line editing . < p >
* @ param document the parent XML document
* @ param contentLocale the content locale
* @ param elementPath the element xpath to get the RDF annotation for
* @ return the RDFA */
public static String getRdfaAttributes ( I_CmsXmlDocument document , Locale contentLocale , String elementPath ) { } } | I_CmsXmlSchemaType schemaType = document . getContentDefinition ( ) . getSchemaType ( elementPath ) ; if ( schemaType != null ) { String path = "" ; if ( elementPath . contains ( "/" ) ) { path += "/" + removePathIndexes ( elementPath . substring ( 0 , elementPath . lastIndexOf ( "/" ) ) + ":" ) ; } path += getTypeUri ( schemaType . getContentDefinition ( ) ) + "/" + elementPath ; return String . format ( RDFA_ATTRIBUTES , CmsContentDefinition . uuidToEntityId ( document . getFile ( ) . getStructureId ( ) , contentLocale . toString ( ) ) , path ) ; } else { return "" ; } |
public class InterfaceEndpointsInner { /** * Deletes the specified interface endpoint .
* @ param resourceGroupName The name of the resource group .
* @ param interfaceEndpointName The name of the interface endpoint .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */
public void delete ( String resourceGroupName , String interfaceEndpointName ) { } } | deleteWithServiceResponseAsync ( resourceGroupName , interfaceEndpointName ) . toBlocking ( ) . last ( ) . body ( ) ; |
public class AmazonCodeDeployClient { /** * Gets information about one or more on - premises instances .
* @ param batchGetOnPremisesInstancesRequest
* Represents the input of a BatchGetOnPremisesInstances operation .
* @ return Result of the BatchGetOnPremisesInstances operation returned by the service .
* @ throws InstanceNameRequiredException
* An on - premises instance name was not specified .
* @ throws InvalidInstanceNameException
* The on - premises instance name was specified in an invalid format .
* @ throws BatchLimitExceededException
* The maximum number of names or IDs allowed for this request ( 100 ) was exceeded .
* @ sample AmazonCodeDeploy . BatchGetOnPremisesInstances
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / codedeploy - 2014-10-06 / BatchGetOnPremisesInstances "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public BatchGetOnPremisesInstancesResult batchGetOnPremisesInstances ( BatchGetOnPremisesInstancesRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeBatchGetOnPremisesInstances ( request ) ; |
public class Grammar { /** * Gets the precedence of the associated infix handler .
* @ param token The token whose handler we want precedence for
* @ return the precedence of the associated infix handler or 0 if there is none . */
public int precedence ( ParserToken token ) { } } | if ( isInfix ( token ) ) { return infixHandlers . get ( token . type ) . precedence ( ) ; } return 0 ; |
public class StartExportTaskRequest { /** * The file format for the returned export data . Default value is < code > CSV < / code > . < b > Note : < / b > < i > The < / i >
* < code > GRAPHML < / code > < i > option has been deprecated . < / i >
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setExportDataFormat ( java . util . Collection ) } or { @ link # withExportDataFormat ( java . util . Collection ) } if you
* want to override the existing values .
* @ param exportDataFormat
* The file format for the returned export data . Default value is < code > CSV < / code > . < b > Note : < / b > < i > The < / i >
* < code > GRAPHML < / code > < i > option has been deprecated . < / i >
* @ return Returns a reference to this object so that method calls can be chained together .
* @ see ExportDataFormat */
public StartExportTaskRequest withExportDataFormat ( String ... exportDataFormat ) { } } | if ( this . exportDataFormat == null ) { setExportDataFormat ( new java . util . ArrayList < String > ( exportDataFormat . length ) ) ; } for ( String ele : exportDataFormat ) { this . exportDataFormat . add ( ele ) ; } return this ; |
public class BinaryJedis {
    /**
     * INCRBY works just like {@link #incr(byte[]) INCR} but instead of
     * incrementing by 1 the increment is an integer.
     * <p>
     * INCR commands are limited to 64 bit signed integers.
     * Note: this is actually a string operation, that is, in Redis there are
     * not "integer" types. Simply the string stored at the key is parsed as a
     * base 10 64 bit signed integer, incremented, and then converted back as
     * a string.
     * <p>
     * Time complexity: O(1)
     *
     * @see #incr(byte[])
     * @see #decr(byte[])
     * @see #decrBy(byte[], long)
     * @param key the key holding the counter
     * @param increment amount to add to the stored value
     * @return the new value of key after the increment
     */
    @Override
    public Long incrBy(final byte[] key, final long increment) {
        // Plain commands must not run inside an active MULTI/pipeline.
        checkIsInMultiOrPipeline();
        client.incrBy(key, increment);
        // Blocks until the server replies with the post-increment value.
        return client.getIntegerReply();
    }
}
public class RoleDAO {
    /**
     * Adds a single Permission to the Role's Permission collection (the
     * "perms" set column of the role row identified by ns + name).
     *
     * @param trans the authorization transaction context
     * @param role the role row to update
     * @param perm the permission to encode and add
     * @return OK on success, or ERR_Backend if the Cassandra call fails
     */
    public Result<Void> addPerm(AuthzTrans trans, RoleDAO.Data role, PermDAO.Data perm) {
        // Note: Prepared Statements for Collection updates aren't supported,
        // so the CQL is assembled by string concatenation.
        // NOTE(review): pencode, role.ns and role.name are interpolated
        // directly into the CQL; assumes these values are trusted/sanitized
        // upstream — confirm, otherwise this is injectable.
        String pencode = perm.encode();
        try {
            getSession(trans).execute(UPDATE_SP + TABLE + " SET perms = perms + {'" + pencode + "'} WHERE " + "ns = '" + role.ns + "' AND name = '" + role.name + "';");
        } catch (DriverException | APIException | IOException e) {
            // Possibly a lost session; report so reset logic can run.
            reportPerhapsReset(trans, e);
            return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
        }
        // Record the modification for audit/history purposes.
        wasModified(trans, CRUD.update, role, "Added permission " + pencode + " to role " + role.fullName());
        return Result.ok();
    }
}
public class AjaxAddableTabbedPanel {
    /**
     * Inserts a new tab at the given index, selects it, and schedules this
     * panel for Ajax re-render.
     *
     * @param target the Ajax request target used to repaint this panel
     * @param tab the tab to insert
     * @param index the insertion position within the current tab list
     * @throws IndexOutOfBoundsException if index is negative or not less than
     *         the current tab count. NOTE(review): index == size() (append at
     *         the end) is rejected even though List.add(index, e) would allow
     *         it — confirm whether that is intentional.
     */
    public void onNewTab(final AjaxRequestTarget target, final T tab, final int index) {
        if ((index < 0) || (index >= getTabs().size())) {
            throw new IndexOutOfBoundsException();
        }
        getTabs().add(index, tab);
        setSelectedTab(index);
        target.add(this);
    }
}
public class EncryptedCachedDiskStringsTable {
    /**
     * Adds a string to the table on disk and/or the in-memory cache,
     * depending on the configured cache size.
     *
     * @param str string to store
     * @param os OutputStream to use to write it; when null, the string is
     *        written through the temp file's FileChannel instead
     * @throws IOException if the disk write fails
     */
    private void addString(String str, OutputStream os) throws IOException {
        if (this.cacheSize >= 0) { // add to disk
            byte[] strbytes = str.getBytes(EncryptedCachedDiskStringsTable.encoding);
            // Each record is a 4-byte big-endian length prefix + payload.
            byte[] sizeOfStr = ByteBuffer.allocate(4).putInt(strbytes.length).array();
            // Remember where this record starts so it can be read back later.
            this.stringPositionInFileList.add(this.tempFileSize);
            if (os != null) {
                os.write(sizeOfStr);
                os.write(strbytes);
            } else { // we can write to the random access file
                FileChannel fc = this.tempRAF.getChannel().position(this.tempFileSize);
                fc.write(ByteBuffer.wrap(sizeOfStr));
                fc.write(ByteBuffer.wrap(strbytes));
            }
            this.tempFileSize += sizeOfStr.length + strbytes.length;
        }
        if (this.cacheSize < 0) { // negative cache size: cache everything in memory
            this.cache.put(this.currentItem, str);
            this.currentItem++;
        } else if ((this.cacheSize > 0) && (this.currentItem < this.cacheSize)) {
            // put the first items already into cache
            this.cache.put(this.currentItem, str);
            this.currentItem++;
        }
    }
}
public class ObjectManager { /** * Locate a transaction registered with this ObjectManager .
* with the same XID as the one passed .
* If a null XID is passed this will return any registered transaction with a null XID .
* @ param XID Xopen identifier .
* @ return Transaction identified by the XID .
* @ throws ObjectManagerException */
public final Transaction getTransactionByXID ( byte [ ] XID ) throws ObjectManagerException { } } | if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , "getTransactionByXID" , "XIDe=" + XID + "(byte[]" ) ; Transaction transaction = objectManagerState . getTransactionByXID ( XID ) ; if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , "getTransactionByXID" , "returns transaction=" + transaction + "(Transaction)" ) ; return transaction ; |
public class Minutes { /** * Subtracts this amount from the specified temporal object .
* This returns a temporal object of the same observable type as the input
* with this amount subtracted .
* In most cases , it is clearer to reverse the calling pattern by using
* { @ link Temporal # minus ( TemporalAmount ) } .
* < pre >
* / / these two lines are equivalent , but the second approach is recommended
* dateTime = thisAmount . subtractFrom ( dateTime ) ;
* dateTime = dateTime . minus ( thisAmount ) ;
* < / pre >
* Only non - zero amounts will be subtracted .
* This instance is immutable and unaffected by this method call .
* @ param temporal the temporal object to adjust , not null
* @ return an object of the same type with the adjustment made , not null
* @ throws DateTimeException if unable to subtract
* @ throws UnsupportedTemporalTypeException if the MINUTES unit is not supported
* @ throws ArithmeticException if numeric overflow occurs */
@ Override public Temporal subtractFrom ( Temporal temporal ) { } } | if ( minutes != 0 ) { temporal = temporal . minus ( minutes , MINUTES ) ; } return temporal ; |
public class Search { /** * Group results by the specified field .
* @ param fieldName by which to group results
* @ param isNumber whether field isNumeric .
* @ return this for additional parameter setting or to query */
public Search groupField ( String fieldName , boolean isNumber ) { } } | assertNotEmpty ( fieldName , "fieldName" ) ; if ( isNumber ) { databaseHelper . query ( "group_field" , fieldName + "<number>" ) ; } else { databaseHelper . query ( "group_field" , fieldName ) ; } return this ; |
public class ChronicleMapBuilder { /** * Configures the average number of bytes , taken by serialized form of values , put into maps ,
* created by this builder . However , in many cases { @ link # averageValue ( Object ) } might be easier
* to use and more reliable . If value size is always the same , call { @ link
* # constantValueSizeBySample ( Object ) } method instead of this one .
* < p > { @ code ChronicleHashBuilder } implementation heuristically chooses { @ linkplain
* # actualChunkSize ( int ) the actual chunk size } based on this configuration and the key size ,
* that , however , might result to quite high internal fragmentation , i . e . losses because only
* integral number of chunks could be allocated for the entry . If you want to avoid this , you
* should manually configure the actual chunk size in addition to this average value size
* configuration , which is anyway needed .
* < p > If values are of boxed primitive type or { @ link Byteable } subclass , i . e . if value size is
* known statically , it is automatically accounted and shouldn ' t be specified by user .
* < p > Calling this method clears any previous { @ link # constantValueSizeBySample ( Object ) } and
* { @ link # averageValue ( Object ) } configurations .
* @ param averageValueSize number of bytes , taken by serialized form of values
* @ return this builder back
* @ throws IllegalStateException if value size is known statically and shouldn ' t be
* configured by user
* @ throws IllegalArgumentException if the given { @ code averageValueSize } is non - positive
* @ see # averageValue ( Object )
* @ see # constantValueSizeBySample ( Object )
* @ see # averageKeySize ( double )
* @ see # actualChunkSize ( int ) */
public ChronicleMapBuilder < K , V > averageValueSize ( double averageValueSize ) { } } | checkSizeIsStaticallyKnown ( valueBuilder , "Value" ) ; checkAverageSize ( averageValueSize , "value" ) ; this . averageValueSize = averageValueSize ; averageValue = null ; sampleValue = null ; return this ; |
public class PrintStreamMonitor { /** * Notify the monitor of the an error during the download
* process .
* @ param resource the name of the remote resource .
* @ param message a non - localized message describing the problem in english . */
public void notifyError ( URL resource , String message ) { } } | m_out . println ( resource . toExternalForm ( ) + " : " + message ) ; |
public class StAXEncoder {
    /**
     * Writes a processing instruction to the underlying encoder.
     * The PI is silently dropped unless PI preservation is enabled.
     *
     * @param target the PI target
     * @param data the PI data
     * @throws XMLStreamException wrapping any failure raised by the encoder
     * @see javax.xml.stream.XMLStreamWriter#writeProcessingInstruction(java.lang.String, java.lang.String)
     */
    public void writeProcessingInstruction(String target, String data) throws XMLStreamException {
        if (preservePI) {
            try {
                // Flush any buffered attribute/text events before the PI.
                this.checkPendingATEvents();
                encoder.encodeProcessingInstruction(target, data);
            } catch (Exception e) {
                throw new XMLStreamException(e.getLocalizedMessage(), e);
            }
        }
    }
}
public class Skip32 { /** * Decrypts the provided value using the specified key
* The key should be a byte array of 10 elements .
* @ param value
* @ param key
* @ return The decrypted value */
public static int decrypt ( int value , byte [ ] key ) { } } | int [ ] buf = new int [ 4 ] ; buf [ 0 ] = ( ( value >> 24 ) & 0xff ) ; buf [ 1 ] = ( ( value >> 16 ) & 0xff ) ; buf [ 2 ] = ( ( value >> 8 ) & 0xff ) ; buf [ 3 ] = ( ( value >> 0 ) & 0xff ) ; skip32 ( key , buf , false ) ; int out = ( ( buf [ 0 ] ) << 24 ) | ( ( buf [ 1 ] ) << 16 ) | ( ( buf [ 2 ] ) << 8 ) | ( buf [ 3 ] ) ; return out ; |
public class DaVinci {
    /**
     * Tries to load the [path] image from the in-memory LRU cache, optionally
     * falling back to the disk LRU cache on a miss.
     *
     * @param path path or URL of the bitmap
     * @param tryFromDisk when true, consult the disk cache on a memory miss
     * @return the cached bitmap, or null if not found in either cache
     */
    private Bitmap loadFromLruCache(final String path, boolean tryFromDisk) {
        int key = getKey(path);
        Bitmap bitmap = mImagesCache.get(key); // try to retrieve from lruCache
        Log.d(TAG, "bitmap " + path + " from lruCache [" + key + "] " + bitmap);
        if (tryFromDisk && bitmap == null) {
            bitmap = loadFromDiskLruCache(key); // try to retrieve from disk cache
            Log.d(TAG, "bitmap " + path + " from diskLruCache " + bitmap);
            if (bitmap != null) { // if found on disk cache
                mImagesCache.put(key, bitmap); // promote it into the memory cache
            }
        }
        return bitmap;
    }
}
public class DssatCommonOutput { /** * Get experiment name without any extention content after first underscore
* @ param result date holder for experiment data
* @ return experiment name */
protected String getExName ( Map result ) { } } | String ret = getValueOr ( result , "exname" , "" ) ; if ( ret . matches ( "\\w+\\.\\w{2}[Xx]" ) ) { ret = ret . substring ( 0 , ret . length ( ) - 1 ) . replace ( "." , "" ) ; } // TODO need to be updated with a translate rule for other models ' exname
if ( ret . matches ( ".+(_+\\d+)+$" ) ) { ret = ret . replaceAll ( "(_+\\d+)+$" , "" ) ; } return ret ; |
public class AppBndAuthorizationTableService {
    /**
     * Registers the web container's default delegation provider as an OSGi
     * service, wiring in the security service and the identity store handler
     * service reference before registration.
     *
     * @param cc the component context supplying the bundle context
     */
    private void registerDefaultDelegationProvider(ComponentContext cc) {
        defaultDelegationProvider = new DefaultDelegationProvider();
        defaultDelegationProvider.setSecurityService(securityServiceRef.getService());
        defaultDelegationProvider.setIdentityStoreHandlerService(identityStoreHandlerServiceRef);
        BundleContext bc = cc.getBundleContext();
        Dictionary<String, Object> props = new Hashtable<String, Object>();
        // Tag the registration so consumers can select the default provider.
        props.put("type", "defaultProvider");
        // Keep the registration handle so it can be unregistered later.
        defaultDelegationProviderReg = bc.registerService(DelegationProvider.class, defaultDelegationProvider, props);
    }
}
public class LUDecomposition { /** * Return lower triangular factor
* @ return L */
public double [ ] [ ] getL ( ) { } } | double [ ] [ ] L = new double [ m ] [ n ] ; L [ 0 ] [ 0 ] = 1. ; for ( int i = 1 ; i < m ; i ++ ) { final double [ ] Li = L [ i ] ; System . arraycopy ( LU [ i ] , 0 , Li , 0 , Math . min ( i , n ) ) ; if ( i < n ) { Li [ i ] = 1. ; } } return L ; |
public class AuditUtil {
    /**
     * Transforms a map of arbitrary values into a map of their string forms,
     * preserving the iteration order of the input map.
     *
     * @param paramMap the param map; must not be null
     * @return a new LinkedHashMap with each value rendered as a string
     */
    public static Map<String, String> transformMap(final Map<String, Object> paramMap) {
        final Map<String, String> paramStrMap = new LinkedHashMap<String, String>();
        for (final Map.Entry<String, Object> entry : paramMap.entrySet()) {
            // String.valueOf tolerates null values (renders "null"), where the
            // original entry.getValue().toString() threw NullPointerException.
            paramStrMap.put(entry.getKey(), String.valueOf(entry.getValue()));
        }
        return paramStrMap;
    }
}
public class CommerceAddressLocalServiceUtil {
    /**
     * Updates the commerce address in the database or adds it if it does not
     * yet exist. Also notifies the appropriate model listeners.
     * Thin static facade that delegates to the underlying local service.
     *
     * @param commerceAddress the commerce address
     * @return the commerce address that was updated
     */
    public static com.liferay.commerce.model.CommerceAddress updateCommerceAddress(com.liferay.commerce.model.CommerceAddress commerceAddress) {
        return getService().updateCommerceAddress(commerceAddress);
    }
}
public class XGenBuff {
    /**
     * Normal case of building up cached HTML without transaction info:
     * hands the cache and this buffer's generator to the code callback.
     *
     * @param cache the cache to render into
     * @param code the callback that emits content through the generator
     * @throws APIException if the callback raises an API-level failure
     * @throws IOException if the callback raises an I/O failure
     */
    public void run(Cache<G> cache, Code<G> code) throws APIException, IOException {
        code.code(cache, xgen);
    }
}
public class SntpMessage {
    /**
     * Decodes an 8-byte NTP timestamp starting at {@code pointer} into a
     * time value in seconds (with fractional part) on this class's epoch
     * scale.
     *
     * @param data raw packet bytes
     * @param pointer offset of the first timestamp byte
     * @return decoded time in seconds as a double
     */
    private static double decode(byte[] data, int pointer) {
        long ntp = 0L;
        // Assemble the 64-bit big-endian fixed-point (32.32) timestamp.
        for (int i = 0; i < 8; i++) {
            long unsigned = (data[i + pointer] & 0xFF);
            ntp |= (unsigned << (56 - i * 8));
        }
        // Fixed point sits before bit 32, so shift the bits right.
        long integer = ((ntp >>> 32) & 0xFFFFFFFFL);
        // Scale the 32-bit fraction by MIO while still in the 64-bit domain.
        long fraction = (((ntp & 0xFFFFFFFFL) * MIO) >>> 32);
        // Era selection: a clear MSB means the timestamp is past the 2036 rollover.
        long off = (((integer & 0x80000000L) == 0) ? OFFSET_2036 : OFFSET_1900);
        long ut1 = (integer * MIO) + fraction - off;
        // NOTE(review): OFFSET_1900 is added back unconditionally even when
        // OFFSET_2036 was subtracted above — confirm the era arithmetic is
        // intended (depends on the constants' definitions, not visible here).
        return ((ut1 + OFFSET_1900) / MIO_AS_DOUBLE);
    }
}
public class FlowLayoutA {
    /**
     * {@inheritDoc}
     *
     * Two-pass flow layout: pass 1 partitions visible children into lines
     * (wrapping when a child would exceed the width) and records each line's
     * height and horizontal start margin; pass 2 positions every child,
     * applying horizontal/vertical container gravity and per-child gravity,
     * re-measuring MATCH_PARENT-height children to the line height.
     */
    @Override
    protected void onLayout(boolean changed, int l, int t, int r, int b) {
        // Reset per-layout bookkeeping.
        mLines.clear();
        mLineHeights.clear();
        mLineMargins.clear();
        int width = getWidth();
        int height = getHeight();
        int linesSum = getPaddingTop();
        int lineWidth = 0;
        int lineHeight = 0;
        List<View> lineViews = new ArrayList<View>();
        // 0 = left-align, .5 = center, 1 = right-align leftover line space.
        float horizontalGravityFactor;
        switch ((mGravity & Gravity.HORIZONTAL_GRAVITY_MASK)) {
            case Gravity.LEFT:
            default:
                horizontalGravityFactor = 0;
                break;
            case Gravity.CENTER_HORIZONTAL:
                horizontalGravityFactor = .5f;
                break;
            case Gravity.RIGHT:
                horizontalGravityFactor = 1;
                break;
        }
        // Pass 1: group children into lines, wrapping on overflow.
        for (int i = 0; i < getChildCount(); i++) {
            View child = getChildAt(i);
            if (child.getVisibility() == View.GONE) {
                continue;
            }
            LayoutParams lp = (LayoutParams) child.getLayoutParams();
            int childWidth = child.getMeasuredWidth() + lp.leftMargin + lp.rightMargin;
            int childHeight = child.getMeasuredHeight() + lp.bottomMargin + lp.topMargin;
            if (lineWidth + childWidth > width) {
                // Close the current line and start a new one.
                mLineHeights.add(lineHeight);
                mLines.add(lineViews);
                mLineMargins.add((int) ((width - lineWidth) * horizontalGravityFactor) + getPaddingLeft());
                linesSum += lineHeight;
                lineHeight = 0;
                lineWidth = 0;
                lineViews = new ArrayList<View>();
            }
            lineWidth += childWidth;
            lineHeight = Math.max(lineHeight, childHeight);
            lineViews.add(child);
        }
        // Close the final (possibly partial) line.
        mLineHeights.add(lineHeight);
        mLines.add(lineViews);
        mLineMargins.add((int) ((width - lineWidth) * horizontalGravityFactor) + getPaddingLeft());
        linesSum += lineHeight;
        // Vertical container gravity shifts the whole block of lines.
        int verticalGravityMargin = 0;
        switch ((mGravity & Gravity.VERTICAL_GRAVITY_MASK)) {
            case Gravity.TOP:
            default:
                break;
            case Gravity.CENTER_VERTICAL:
                verticalGravityMargin = (height - linesSum) / 2;
                break;
            case Gravity.BOTTOM:
                verticalGravityMargin = height - linesSum;
                break;
        }
        int numLines = mLines.size();
        int left;
        int top = getPaddingTop();
        // Pass 2: lay out each line's children.
        for (int i = 0; i < numLines; i++) {
            lineHeight = mLineHeights.get(i);
            lineViews = mLines.get(i);
            left = mLineMargins.get(i);
            int children = lineViews.size();
            for (int j = 0; j < children; j++) {
                View child = lineViews.get(j);
                if (child.getVisibility() == View.GONE) {
                    continue;
                }
                LayoutParams lp = (LayoutParams) child.getLayoutParams();
                // if height is match_parent we need to remeasure child to line height
                if (lp.height == LayoutParams.MATCH_PARENT) {
                    int childWidthMode = MeasureSpec.AT_MOST;
                    int childWidthSize = lineWidth;
                    if (lp.width == LayoutParams.MATCH_PARENT) {
                        childWidthMode = MeasureSpec.EXACTLY;
                    } else if (lp.width >= 0) {
                        childWidthMode = MeasureSpec.EXACTLY;
                        childWidthSize = lp.width;
                    }
                    child.measure(MeasureSpec.makeMeasureSpec(childWidthSize, childWidthMode), MeasureSpec.makeMeasureSpec(lineHeight - lp.topMargin - lp.bottomMargin, MeasureSpec.EXACTLY));
                }
                int childWidth = child.getMeasuredWidth();
                int childHeight = child.getMeasuredHeight();
                // Per-child vertical gravity within its line.
                int gravityMargin = 0;
                if (Gravity.isVertical(lp.gravity)) {
                    switch (lp.gravity) {
                        case Gravity.TOP:
                        default:
                            break;
                        case Gravity.CENTER_VERTICAL:
                        case Gravity.CENTER:
                            gravityMargin = (lineHeight - childHeight - lp.topMargin - lp.bottomMargin) / 2;
                            break;
                        case Gravity.BOTTOM:
                            gravityMargin = lineHeight - childHeight - lp.topMargin - lp.bottomMargin;
                            break;
                    }
                }
                child.layout(left + lp.leftMargin, top + lp.topMargin + gravityMargin + verticalGravityMargin, left + childWidth + lp.leftMargin, top + childHeight + lp.topMargin + gravityMargin + verticalGravityMargin);
                left += childWidth + lp.leftMargin + lp.rightMargin;
            }
            top += lineHeight;
        }
    }
}
public class SchemaBuilder { /** * Shortcut for { @ link # dropAggregate ( CqlIdentifier , CqlIdentifier )
* dropAggregate ( CqlIdentifier . fromCql ( keyspace ) , CqlIdentifier . fromCql ( aggregateName ) } . */
@ NonNull public static Drop dropAggregate ( @ Nullable String keyspace , @ NonNull String aggregateName ) { } } | return new DefaultDrop ( keyspace == null ? null : CqlIdentifier . fromCql ( keyspace ) , CqlIdentifier . fromCql ( aggregateName ) , "AGGREGATE" ) ; |
public class ZipFileArtifactNotifier {
    /**
     * Updates the monitor service according to whether any listeners are
     * registered, i.e. whether any covering paths are present. When listeners
     * are registered, register the file monitor as a service; when none are,
     * unregister it.
     * <p>
     * Registration as a service is only done when the notifier is an exposed
     * notifier. For a non-exposed notifier, see {@link #updateEnclosingMonitor}.
     */
    @FFDCIgnore(IllegalStateException.class)
    private void updateMonitorService() {
        if (!coveringPaths.isEmpty()) {
            if (service == null) {
                try {
                    // If we are shutting down, we want to generate the exception quickly.
                    BundleContext bundleContext = getContainerFactoryHolder().getBundleContext(); // throws 'IllegalStateException'
                    setServiceProperties();
                    service = bundleContext.registerService(FileMonitor.class, this, serviceProperties);
                    // See comments on 'loadZipEntries' for why the entries must be loaded now.
                    loadZipEntries();
                } catch (IllegalStateException e) {
                    // Ignore; the framework is shutting down.
                }
            } else {
                // Do nothing: There is already a service registration.
            }
        } else {
            if (service != null) {
                try {
                    service.unregister();
                } catch (IllegalStateException e) {
                    // Ignore; framework is shutting down.
                }
                // Clear the handle regardless so a later add can re-register.
                service = null;
            } else {
                // Do nothing: There is already no service registration.
            }
        }
    }
}
public class GCCBEZImpl {
    /**
     * Reports whether the given EMF feature is "set" on this object.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            // The RG list feature counts as set only when non-null and non-empty.
            case AfplibPackage.GCCBEZ__RG:
                return rg != null && !rg.isEmpty();
        }
        // Delegate all other features to the superclass.
        return super.eIsSet(featureID);
    }
}
public class ColumnCounts_DSCC {
    /**
     * Initializes class data structures and parameters for the column-count
     * computation on matrix A.
     *
     * @param A the input sparse matrix (CSC format)
     */
    void initialize(DMatrixSparseCSC A) {
        m = A.numRows;
        n = A.numCols;
        // Workspace size: four length-n sections, plus (n + m + 1) extra
        // entries when computing counts for A'A (ata mode).
        int s = 4 * n + (ata ? (n + m + 1) : 0);
        gw.reshape(s);
        w = gw.data;
        // compute the transpose of A
        At.reshape(A.numCols, A.numRows, A.nz_length);
        CommonOps_DSCC.transpose(A, At, gw);
        // initialize w
        Arrays.fill(w, 0, s, -1); // assign all values in workspace to -1
        // Offsets of the four logical sections within w.
        ancestor = 0;
        maxfirst = n;
        prevleaf = 2 * n;
        first = 3 * n;
    }
}
public class FrameOutputWriter {
    /**
     * Constructs a FrameOutputWriter object and then uses it to generate the
     * HTML file which will have the description of all the frames in the
     * documentation. The name of the generated file is "index.html", the
     * default first file for HTML documents.
     *
     * @param configuration the doclet configuration
     * @throws DocletAbortException if writing the file fails
     */
    public static void generate(ConfigurationImpl configuration) {
        FrameOutputWriter framegen;
        // Pre-initialized so the catch block can report a filename even if
        // the failure happens before DocPaths.INDEX is assigned.
        DocPath filename = DocPath.empty;
        try {
            filename = DocPaths.INDEX;
            framegen = new FrameOutputWriter(configuration, filename);
            framegen.generateFrameFile();
            framegen.close();
        } catch (IOException exc) {
            configuration.standardmessage.error("doclet.exception_encountered", exc.toString(), filename);
            throw new DocletAbortException(exc);
        }
    }
}
public class TraceNLSResolver {
    /**
     * Logs that something above couldn't be resolved. The historical Tr-based
     * event logging (gated on the
     * <code>com.ibm.ejs.ras.debugTraceNLSResolver</code> property) has been
     * removed; the event is written to stderr with a stack dump instead.
     *
     * @param message event message, optionally a {@link MessageFormat} pattern
     * @param args parameters for the message formatter (may be null)
     */
    protected static final void logEvent(String message, Object[] args) {
        final String formatted = MessageFormat.format(message, args);
        System.err.println("com.ibm.ejs.ras.hpel.TraceNLSResolver: " + formatted);
        // Dump the stack so the resolution-failure call site is visible.
        Thread.dumpStack();
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.