signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class CmsJspVfsAccessBean { /** * Creates a new instance of the JSP VFS access utility bean . < p >
* To prevent multiple creations of the bean during a request , the OpenCms request context
* attributes are used to cache the created VFS access utility bean . < p >
* @ param cms the current OpenCms user context
* @ return a new instance of the JSP VFS access utility bean */
public static CmsJspVfsAccessBean create ( CmsObject cms ) { } } | CmsJspVfsAccessBean result ; Object attribute = cms . getRequestContext ( ) . getAttribute ( ATTRIBUTE_VFS_ACCESS_BEAN ) ; if ( attribute != null ) { result = ( CmsJspVfsAccessBean ) attribute ; } else { result = new CmsJspVfsAccessBean ( cms ) ; cms . getRequestContext ( ) . setAttribute ( ATTRIBUTE_VFS_ACCESS_BEAN , result ) ; } return result ; |
public class CharsetUtil {

    /**
     * Converts a charset name to a {@link Charset} object.
     *
     * @param charsetName the charset name; when blank (null, empty or whitespace-only)
     *        the platform default charset is returned
     * @return the resolved {@link Charset}
     * @throws UnsupportedCharsetException if the named charset is not supported
     */
    public static Charset charset(String charsetName) throws UnsupportedCharsetException {
        return isBlank(charsetName) ? Charset.defaultCharset() : Charset.forName(charsetName);
    }

    /**
     * Returns true when the string is null, empty, or contains only whitespace/space
     * characters. Inlined JDK-only replacement for the former StrUtil.isBlank helper,
     * removing the external utility dependency.
     */
    private static boolean isBlank(String s) {
        if (s == null) {
            return true;
        }
        for (int i = 0; i < s.length(); i++) {
            char c = s.charAt(i);
            if (!Character.isWhitespace(c) && !Character.isSpaceChar(c)) {
                return false;
            }
        }
        return true;
    }
}
public class Serializer {

    /**
     * Escape XML reserved chars and write as element text content. Used by text and numbering
     * operators to actually write the element text content. At the moment this method is
     * invoked the templates engine has already written the start tag, including the closing
     * tag mark, e.g. <em>&lt;div attrs...&gt;</em>.
     *
     * @param text element text content.
     * @throws IOException if underlying writer fails to write.
     */
    void writeTextContent(String text) throws IOException {
        // NOTE(review): the (String) cast suggests Strings.escapeXML declares a broader
        // return type (e.g. CharSequence/Object) -- confirm before removing it.
        writer.write((String) Strings.escapeXML(text));
    }
}
public class XML {

    /**
     * Writes the XML mapping file starting from the xmlJmapper object.
     *
     * @return this instance of XML
     */
    public XML write() {
        try {
            FilesManager.write(xmlJmapper, xmlPath);
        } catch (IOException e) {
            // Deliberate best-effort: write failures are logged, never propagated,
            // so the fluent chain always continues.
            JmapperLog.error(e);
        }
        return this;
    }
}
public class JRemoteComboBox { /** * Free this object ' s resources . */
public void free ( ) { } } | if ( m_record != null ) if ( ( m_record . getOwner ( ) == null ) || ( m_record . getOwner ( ) == this ) ) m_record . free ( ) ; // This will release the remote session ( if there is one ) .
m_record = null ; |
public class DataFrameJoiner {

    /**
     * Joins the joiner to table2, using the given column for the second table, and returns
     * the resulting table.
     *
     * @param table2 the table to join with
     * @param col2Name the column to join on. If col2Name refers to a double column, the join
     *                 is performed after rounding to integers.
     * @return the resulting table
     */
    public Table rightOuter(Table table2, String col2Name) {
        // Delegates to the overload, passing false for its boolean option.
        // NOTE(review): the flag's meaning is not visible here -- see
        // rightOuter(Table, boolean, String) for what 'false' selects.
        return rightOuter(table2, false, col2Name);
    }
}
public class MtasSolrStatus { /** * Key .
* @ return the string */
public final String key ( ) { } } | key = ( key == null ) ? UUID . randomUUID ( ) . toString ( ) : key ; return key ; |
public class Updater { /** * Execute an async request using specified client .
* @ param client client used to make request
* @ return future that resolves to requested object */
public ListenableFuture < T > updateAsync ( final TwilioRestClient client ) { } } | return Twilio . getExecutorService ( ) . submit ( new Callable < T > ( ) { public T call ( ) { return update ( client ) ; } } ) ; |
public class AbstractCommandLineRunner { /** * Create a writer with the legacy output charset . */
@ GwtIncompatible ( "Unnecessary" ) private Writer streamToLegacyOutputWriter ( OutputStream stream ) throws IOException { } } | if ( legacyOutputCharset == null ) { return new BufferedWriter ( new OutputStreamWriter ( stream , UTF_8 ) ) ; } else { return new BufferedWriter ( new OutputStreamWriter ( stream , legacyOutputCharset ) ) ; } |
public class ListListModel { /** * Set the value of a list element at the specified index .
* @ param index of element to set
* @ param element New element value
* @ return old element value */
public Object set ( int index , Object element ) { } } | Object oldObject = items . set ( index , element ) ; if ( hasChanged ( oldObject , element ) ) { fireContentsChanged ( index ) ; } return oldObject ; |
public class ThreadPoolController {

    /**
     * Adjust the size of the thread pool.
     *
     * @param poolSize the current pool size
     * @param poolAdjustment the change to make to the pool (negative shrinks, positive grows)
     * @return the new pool size; equals poolSize when no adjustment was applied
     */
    int adjustPoolSize(int poolSize, int poolAdjustment) {
        if (threadPool == null)
            return poolSize; // arguably should return 0, but "least change" is safer... This happens during shutdown.
        int newPoolSize = poolSize + poolAdjustment;
        lastAction = LastAction.NONE;
        if (poolAdjustment != 0) {
            // don't shrink below coreThreads, and don't grow above maxThreads
            if (poolAdjustment < 0 && newPoolSize >= coreThreads) {
                lastAction = LastAction.SHRINK;
                setPoolSize(newPoolSize);
            } else if (poolAdjustment > 0 && newPoolSize <= maxThreads) {
                lastAction = LastAction.GROW;
                setPoolSize(newPoolSize);
            } else {
                // Adjustment would violate the bounds: keep the current size and
                // report it back unchanged (lastAction stays NONE).
                newPoolSize = poolSize;
            }
        }
        return newPoolSize;
    }
}
public class ContainerRegistryAuthSupplier {

    /**
     * Get an accessToken to use, possibly refreshing the token if it expires within the
     * minimumExpiryMillis.
     *
     * @return the (possibly refreshed) access token
     * @throws IOException if refreshing the credentials fails
     */
    private AccessToken getAccessToken() throws IOException {
        // synchronize attempts to refresh the accessToken
        synchronized (credentials) {
            if (needsRefresh(credentials.getAccessToken())) {
                credentialRefresher.refresh(credentials);
            }
        }
        // NOTE(review): this final read is outside the synchronized block; a concurrent
        // refresh could swap the token between the check above and this read -- confirm
        // that returning either token is acceptable here.
        return credentials.getAccessToken();
    }
}
public class Conversation {

    /**
     * Parses the given manifest JSON and stores its interactions and targets on this
     * conversation. Made public for testing. There is no other reason to use this method
     * directly.
     *
     * @param interactionManifest raw JSON manifest string
     */
    public void storeInteractionManifest(String interactionManifest) {
        try {
            InteractionManifest payload = new InteractionManifest(interactionManifest);
            Interactions interactions = payload.getInteractions();
            Targets targets = payload.getTargets();
            // All-or-nothing: both parts must parse, otherwise nothing is stored.
            if (interactions != null && targets != null) {
                setTargets(targets.toString());
                setInteractions(interactions.toString());
            } else {
                ApptentiveLog.e(CONVERSATION, "Unable to save InteractionManifest.");
            }
        } catch (JSONException e) {
            // Malformed manifest JSON: log and keep the previously stored data.
            ApptentiveLog.w(CONVERSATION, "Invalid InteractionManifest received.");
            logException(e);
        }
    }
}
public class StaticSelectEkstaziMojo { /** * INTERNAL */
protected List < String > computeNonAffectedClasses ( ) { } } | List < String > nonAffectedClasses = new ArrayList ( ) ; if ( ! getForceall ( ) ) { // Create excludes list ; we assume that all files are in
// the parentdir .
nonAffectedClasses = AffectedChecker . findNonAffectedClasses ( parentdir , getRootDirOption ( ) ) ; // Do not exclude recently failing tests if appropriate
// argument is provided .
if ( getForcefailing ( ) ) { List < String > recentlyFailingClasses = AffectedChecker . findRecentlyFailingClasses ( parentdir , getRootDirOption ( ) ) ; nonAffectedClasses . removeAll ( recentlyFailingClasses ) ; } } return nonAffectedClasses ; |
public class OptionsBuilder {

    /**
     * Load supported options from command output, then modify some options manually.
     * Step 1: launch the raspistill command to load supported options.
     * Step 2: configure zero-argument ("long") options.
     *
     * @param name raspistill, raspivid... raspberrypi camera command line name
     * @return the cached or freshly parsed Options for the given command
     */
    public static Options create(String name) {
        // One Options instance is cached per command name; guarded for concurrent callers.
        synchronized (SINGLETONS) {
            if (SINGLETONS.containsKey(name)) {
                return SINGLETONS.get(name);
            }
            Options options = new Options();
            List<String> lines = CommanderUtil.execute(name);
            for (String line : lines) {
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug(line);
                }
                // Only lines starting with "-" describe options.
                if (!line.startsWith("-")) {
                    continue;
                }
                // NOTE(review): assumes every option line contains ":" and both a short and
                // a long form ("-x, --xxx : desc"); a line missing either would throw
                // StringIndexOutOfBounds/ArrayIndexOutOfBounds -- confirm the command's
                // help output always matches this shape.
                int indexOfDesc = line.indexOf(":");
                String opts[] = line.substring(0, indexOfDesc).trim().split(",");
                String desc = line.substring(indexOfDesc + 1).trim();
                options.addOption(new Option(opts[0].trim().substring(1), opts[1].trim().substring(2), true, desc));
            }
            SINGLETONS.put(name, options);
            // Flag-style options listed in NO_VALUE_OPTIONS take no argument.
            for (String opt : NO_VALUE_OPTIONS) {
                Option option = options.getOption(opt);
                if (option != null) {
                    option.setArgs(0);
                }
            }
            return options;
        }
    }
}
public class Layout { /** * Returns true if the given layout matches this one . Layout ID ,
* generation , and creation info is not considered in the comparison . */
public boolean equalLayouts ( Layout layout ) throws FetchException { } } | if ( this == layout ) { return true ; } return getStorableTypeName ( ) . equals ( layout . getStorableTypeName ( ) ) && getAllProperties ( ) . equals ( layout . getAllProperties ( ) ) && Arrays . equals ( mStoredLayout . getExtraData ( ) , layout . mStoredLayout . getExtraData ( ) ) ; |
public class ObjectFactory2 {

    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link MentionOf}{@code >}.
     *
     * @param value the MentionOf payload to wrap
     * @return the wrapped JAXB element (declared with no scope class)
     */
    @XmlElementDecl(namespace = "http://www.w3.org/ns/prov#", name = "mentionOf")
    public JAXBElement<MentionOf> createMentionOf(MentionOf value) {
        return new JAXBElement<MentionOf>(_MentionOf_QNAME, MentionOf.class, null, value);
    }
}
public class KerasModelBuilder {

    /**
     * Set model architecture from an input stream of model YAML.
     *
     * @param modelYamlInputStream input stream of model YAML
     * @return this model builder
     * @throws IOException if reading the stream fails
     */
    public KerasModelBuilder modelYamlInputStream(InputStream modelYamlInputStream) throws IOException {
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        IOUtils.copy(modelYamlInputStream, byteArrayOutputStream);
        // NOTE(review): the YAML content is stored into 'modelJson' -- this looks like it
        // should target a 'modelYaml' field instead; confirm which field the build step reads.
        // NOTE(review): new String(byte[]) uses the platform default charset -- presumably
        // the stream is UTF-8; consider specifying the charset explicitly.
        this.modelJson = new String(byteArrayOutputStream.toByteArray());
        return this;
    }
}
public class NonBlockingCharArrayWriter { /** * Write a portion of a string to the buffer .
* @ param sStr
* String to be written from
* @ param nOfs
* Offset from which to start reading characters
* @ param nLen
* Number of characters to be written */
@ Override public void write ( @ Nonnull final String sStr , @ Nonnegative final int nOfs , @ Nonnegative final int nLen ) { } } | if ( nLen > 0 ) { final int newcount = m_nCount + nLen ; if ( newcount > m_aBuf . length ) { m_aBuf = Arrays . copyOf ( m_aBuf , Math . max ( m_aBuf . length << 1 , newcount ) ) ; } sStr . getChars ( nOfs , nOfs + nLen , m_aBuf , m_nCount ) ; m_nCount = newcount ; } |
public class Database { /** * Creates database controller and new database instance if wasn ' t created yet or parameter reset was set to true .
* @ param app Application instance .
* @ param resetDBHelperInstance True if underlying db helper should be recreated . This should be set to true only for tests .
* @ param log Logger instance to use Foundation logging mechanism .
* @ return DB controller instance . */
public static synchronized Database getInstance ( @ NonNull Application app , boolean resetDBHelperInstance , @ NonNull Logger log ) { } } | if ( dbHelper == null ) { dbHelper = DatabaseHelper . getInstance ( app . getApplicationContext ( ) ) ; } else if ( resetDBHelperInstance ) { // For testing purposes we need separate instance of DatabaseHelper for every test .
dbHelper = new DatabaseHelper ( app . getApplicationContext ( ) ) ; } return new Database ( log ) ; |
public class StringConverter { /** * Serializes a Java object to a String representation .
* @ param obj
* the object to serialize
* @ param converters
* an optional collection of custom converter classes
* @ return a String representation of the Java object */
public String serialize ( final Object obj , final Collection < Converter < ? > > converters ) { } } | final DelegatingConverter delegatingConverter = new DelegatingConverter ( ) ; if ( converters != null ) { for ( final Converter < ? > converter : converters ) { delegatingConverter . addConverter ( converter ) ; } } delegatingConverter . addConverter ( new ConfigurationItemConverter ( ) ) ; delegatingConverter . addConverter ( getResourceConverter ( ) ) ; delegatingConverter . addConverter ( new StandardTypeConverter ( _configuration , delegatingConverter ) ) ; delegatingConverter . initializeAll ( _injectionManager ) ; return delegatingConverter . toString ( obj ) ; |
public class CachedConnectionManagerImpl {

    /** {@inheritDoc} */
    public void registerConnection(org.ironjacamar.core.api.connectionmanager.ConnectionManager cm,
            org.ironjacamar.core.api.connectionmanager.listener.ConnectionListener cl, Object connection) {
        // In debug mode, remember where each connection was obtained (for leak reports).
        if (debug) {
            synchronized (connectionStackTraces) {
                connectionStackTraces.put(connection, new Throwable("STACKTRACE"));
            }
        }
        Context context = currentContext();
        log.tracef("registering connection from connection manager: %s, connection : %s, context: %s", cm, connection, context);
        // Connections are only tracked while a CCM context is active.
        if (context != null) {
            // Use internal API
            org.ironjacamar.core.connectionmanager.ConnectionManager iCm = (org.ironjacamar.core.connectionmanager.ConnectionManager) cm;
            org.ironjacamar.core.connectionmanager.listener.ConnectionListener iCl = (org.ironjacamar.core.connectionmanager.listener.ConnectionListener) cl;
            if (Tracer.isEnabled()) {
                Tracer.registerCCMConnection(iCl.getManagedConnectionPool().getPool().getConfiguration().getId(),
                        iCl.getManagedConnectionPool(), iCl, connection, context.toString());
            }
            context.registerConnection(iCm, iCl, connection);
        }
    }
}
public class HttpUtils { /** * Uses the post method to send a url with arguments by http , this method can call RESTful Api .
* @ param url the http url
* @ param timeout milliseconds to wait for the server to respond before giving up
* @ return the response body stream as UTF - 8 string if response status is OK or CREATED */
public static String post ( String url , Integer timeout ) throws IOException { } } | final StringBuilder contentBuffer = new StringBuilder ( ) ; post ( url , timeout , inputStream -> { try ( BufferedReader br = new BufferedReader ( new InputStreamReader ( inputStream , "UTF-8" ) ) ) { String line ; while ( ( line = br . readLine ( ) ) != null ) { contentBuffer . append ( line ) ; } } } ) ; return contentBuffer . toString ( ) ; |
public class DefaultRetryPolicy { /** * Returns the delay ( in milliseconds ) before next retry attempt . A negative value indicates that no more retries
* should be made .
* @ param exception the exception from the failed request , represented as an BceClientException object .
* @ param retriesAttempted the number of times the current request has been attempted
* ( not including the next attempt after the delay ) .
* @ return the delay ( in milliseconds ) before next retry attempt . A negative value indicates that no more retries
* should be made . */
@ Override public long getDelayBeforeNextRetryInMillis ( BceClientException exception , int retriesAttempted ) { } } | if ( ! this . shouldRetry ( exception , retriesAttempted ) ) { return - 1 ; } if ( retriesAttempted < 0 ) { return 0 ; } return ( 1 << ( retriesAttempted + 1 ) ) * SCALE_FACTOR ; |
public class TimeSeries { /** * Creates a { @ link TimeSeries } .
* @ param labelValues the { @ code LabelValue } s that uniquely identify this { @ code TimeSeries } .
* @ param points the data { @ code Point } s of this { @ code TimeSeries } .
* @ param startTimestamp the start { @ code Timestamp } of this { @ code TimeSeries } . Must be non - null
* for cumulative { @ code Point } s .
* @ return a { @ code TimeSeries } .
* @ since 0.17 */
public static TimeSeries create ( List < LabelValue > labelValues , List < Point > points , @ Nullable Timestamp startTimestamp ) { } } | Utils . checkListElementNotNull ( Utils . checkNotNull ( points , "points" ) , "point" ) ; return createInternal ( labelValues , Collections . unmodifiableList ( new ArrayList < Point > ( points ) ) , startTimestamp ) ; |
public class FastaReaderHelper { /** * Read a fasta file containing amino acids with setup that would handle most
* cases . User is responsible for closing InputStream because you opened it
* @ param inStream
* @ return
* @ throws IOException */
public static LinkedHashMap < String , ProteinSequence > readFastaProteinSequence ( InputStream inStream ) throws IOException { } } | FastaReader < ProteinSequence , AminoAcidCompound > fastaReader = new FastaReader < ProteinSequence , AminoAcidCompound > ( inStream , new GenericFastaHeaderParser < ProteinSequence , AminoAcidCompound > ( ) , new ProteinSequenceCreator ( AminoAcidCompoundSet . getAminoAcidCompoundSet ( ) ) ) ; return fastaReader . process ( ) ; |
public class ColumnBuilder { /** * For creating regular columns
* @ return */
protected AbstractColumn buildSimpleColumn ( ) { } } | SimpleColumn column = new SimpleColumn ( ) ; populateCommonAttributes ( column ) ; columnProperty . getFieldProperties ( ) . putAll ( fieldProperties ) ; column . setColumnProperty ( columnProperty ) ; column . setExpressionToGroupBy ( customExpressionToGroupBy ) ; column . setFieldDescription ( fieldDescription ) ; return column ; |
public class Agent {

    /**
     * Finds the right plug-in for an instance.
     *
     * @param instance a non-null instance
     * @return a proxy wrapping the plug-in associated with the instance's installer name,
     *         or null if no plug-in matched
     */
    public PluginInterface findPlugin(Instance instance) {
        // Find a plug-in
        PluginInterface result = null;
        if (this.simulatePlugins) {
            // Simulation mode: a mock stands in for every real plug-in.
            this.logger.finer("Simulating plugins...");
            result = new PluginMock();
        } else {
            String installerName = null;
            if (instance.getComponent() != null)
                installerName = ComponentHelpers.findComponentInstaller(instance.getComponent());
            // Run through available plug-ins (installer names compared case-insensitively)
            for (PluginInterface pi : this.plugins) {
                if (pi.getPluginName().equalsIgnoreCase(installerName)) {
                    result = pi;
                    break;
                }
            }
            if (result == null)
                this.logger.severe("No plugin was found for instance '" + instance.getName() + "' with installer '" + installerName + "'.");
        }
        // Initialize the result, if any
        if (result != null)
            result.setNames(this.applicationName, this.scopedInstancePath);
        // A found plug-in is always handed out wrapped in a proxy.
        return result == null ? null : new PluginProxy(result);
    }
}
public class NFACompiler { /** * Verifies if the provided pattern can possibly generate empty match . Example of patterns that can possibly
* generate empty matches are : A * , A ? , A * B ? etc .
* @ param pattern pattern to check
* @ return true if empty match could potentially match the pattern , false otherwise */
public static boolean canProduceEmptyMatches ( final Pattern < ? , ? > pattern ) { } } | NFAFactoryCompiler < ? > compiler = new NFAFactoryCompiler < > ( checkNotNull ( pattern ) ) ; compiler . compileFactory ( ) ; State < ? > startState = compiler . getStates ( ) . stream ( ) . filter ( State :: isStart ) . findFirst ( ) . orElseThrow ( ( ) -> new IllegalStateException ( "Compiler produced no start state. It is a bug. File a jira." ) ) ; Set < State < ? > > visitedStates = new HashSet < > ( ) ; final Stack < State < ? > > statesToCheck = new Stack < > ( ) ; statesToCheck . push ( startState ) ; while ( ! statesToCheck . isEmpty ( ) ) { final State < ? > currentState = statesToCheck . pop ( ) ; if ( visitedStates . contains ( currentState ) ) { continue ; } else { visitedStates . add ( currentState ) ; } for ( StateTransition < ? > transition : currentState . getStateTransitions ( ) ) { if ( transition . getAction ( ) == StateTransitionAction . PROCEED ) { if ( transition . getTargetState ( ) . isFinal ( ) ) { return true ; } else { statesToCheck . push ( transition . getTargetState ( ) ) ; } } } } return false ; |
public class StructureDiagramGenerator { /** * Get the unplaced ring atom in this bond
* @ param bond the bond to be search for the unplaced ring atom
* @ return the unplaced ring atom in this bond */
private IAtom getRingAtom ( IBond bond ) { } } | if ( bond . getBegin ( ) . getFlag ( CDKConstants . ISINRING ) && ! bond . getBegin ( ) . getFlag ( CDKConstants . ISPLACED ) ) { return bond . getBegin ( ) ; } if ( bond . getEnd ( ) . getFlag ( CDKConstants . ISINRING ) && ! bond . getEnd ( ) . getFlag ( CDKConstants . ISPLACED ) ) { return bond . getEnd ( ) ; } return null ; |
public class KerasLayerUtils { /** * Check whether Keras weight regularization is of unknown type . Currently prints a warning
* since main use case for model import is inference , not further training . Unlikely since
* standard Keras weight regularizers are L1 and L2.
* @ param regularizerConfig Map containing Keras weight reguarlization configuration */
private static void checkForUnknownRegularizer ( Map < String , Object > regularizerConfig , boolean enforceTrainingConfig , KerasLayerConfiguration conf ) throws UnsupportedKerasConfigurationException { } } | if ( regularizerConfig != null ) { for ( String field : regularizerConfig . keySet ( ) ) { if ( ! field . equals ( conf . getREGULARIZATION_TYPE_L1 ( ) ) && ! field . equals ( conf . getREGULARIZATION_TYPE_L2 ( ) ) && ! field . equals ( conf . getLAYER_FIELD_NAME ( ) ) && ! field . equals ( conf . getLAYER_FIELD_CLASS_NAME ( ) ) && ! field . equals ( conf . getLAYER_FIELD_CONFIG ( ) ) ) { if ( enforceTrainingConfig ) throw new UnsupportedKerasConfigurationException ( "Unknown regularization field " + field ) ; else log . warn ( "Ignoring unknown regularization field " + field ) ; } } } |
public class Parameters {

    /**
     * Gets a parameter whose value is a (possibly empty) list of positive integers.
     *
     * @param param the parameter name
     * @return the parsed list of positive integers
     */
    public List<Integer> getPositiveIntegerList(final String param) {
        // Delegates to the generic list getter with an integer parser, a positivity
        // validator, and the description used in error messages.
        return getList(param, new StringToInteger(), new IsPositive<Integer>(), "positive integer");
    }
}
public class RetryTemplate {

    /**
     * Execute the callable with retries.
     *
     * @param callable to execute
     * @param <T> the return type of the callable
     * @return the result of the callable
     * @throws Exception if the callable still throws an exception after all retries
     */
    public <T> T execute(final Callable<T> callable) throws Exception {
        int attempts = 0;
        int maxAttempts = retryPolicy.getMaxAttempts();
        long delay = retryPolicy.getDelay();
        TimeUnit timeUnit = retryPolicy.getTimeUnit();
        while (attempts < maxAttempts) {
            try {
                attempts++;
                // Hook order per attempt: beforeCall -> call -> afterCall. Note that an
                // exception thrown by beforeCall/afterCall is caught and retried too.
                beforeCall();
                T result = callable.call();
                afterCall(result);
                return result;
            } catch (Exception e) {
                onException(e);
                // Out of attempts: give the onMaxAttempts hook a last look, then propagate.
                if (attempts >= maxAttempts) {
                    onMaxAttempts(e);
                    throw e;
                }
                beforeWait();
                sleep(timeUnit.toMillis(delay));
                afterWait();
            }
        }
        // Only reachable when maxAttempts <= 0: nothing was ever attempted.
        return null;
    }
}
public class Repository { /** * Loads modules from classpath */
public void loadClasspath ( Resolver resolver ) throws IOException { } } | Enumeration < URL > e ; e = getClass ( ) . getClassLoader ( ) . getResources ( MODULE_DESCRIPTOR ) ; while ( e . hasMoreElements ( ) ) { loadModule ( resolver , e . nextElement ( ) ) ; } |
public class Bag { /** * Removes the element at the specified position in this Bag . Order of elements is not preserved .
* @ param index
* @ return element that was removed from the Bag . */
public E remove ( int index ) { } } | E e = data [ index ] ; // make copy of element to remove so it can be returned
data [ index ] = data [ -- size ] ; // overwrite item to remove with last element
data [ size ] = null ; // null last element , so gc can do its work
return e ; |
public class CommerceNotificationAttachmentPersistenceImpl {

    /**
     * Returns the commerce notification attachment where uuid = &#63; and groupId = &#63; or
     * returns <code>null</code> if it could not be found, optionally using the finder cache.
     *
     * @param uuid the uuid
     * @param groupId the group ID
     * @param retrieveFromCache whether to retrieve from the finder cache
     * @return the matching commerce notification attachment, or <code>null</code> if a
     *         matching commerce notification attachment could not be found
     */
    @Override
    public CommerceNotificationAttachment fetchByUUID_G(String uuid, long groupId, boolean retrieveFromCache) {
        Object[] finderArgs = new Object[] { uuid, groupId };
        Object result = null;
        if (retrieveFromCache) {
            result = finderCache.getResult(FINDER_PATH_FETCH_BY_UUID_G, finderArgs, this);
        }
        // Guard against a stale/colliding cache entry: verify the cached bean really
        // matches the requested uuid and groupId before trusting it.
        if (result instanceof CommerceNotificationAttachment) {
            CommerceNotificationAttachment commerceNotificationAttachment = (CommerceNotificationAttachment) result;
            if (!Objects.equals(uuid, commerceNotificationAttachment.getUuid()) ||
                    (groupId != commerceNotificationAttachment.getGroupId())) {
                result = null;
            }
        }
        if (result == null) {
            // Cache miss: build SELECT ... WHERE uuid = ? AND groupId = ?, with the uuid
            // clause variant chosen for null / empty / bound values.
            StringBundler query = new StringBundler(4);
            query.append(_SQL_SELECT_COMMERCENOTIFICATIONATTACHMENT_WHERE);
            boolean bindUuid = false;
            if (uuid == null) {
                query.append(_FINDER_COLUMN_UUID_G_UUID_1);
            } else if (uuid.equals("")) {
                query.append(_FINDER_COLUMN_UUID_G_UUID_3);
            } else {
                bindUuid = true;
                query.append(_FINDER_COLUMN_UUID_G_UUID_2);
            }
            query.append(_FINDER_COLUMN_UUID_G_GROUPID_2);
            String sql = query.toString();
            Session session = null;
            try {
                session = openSession();
                Query q = session.createQuery(sql);
                QueryPos qPos = QueryPos.getInstance(q);
                if (bindUuid) {
                    qPos.add(uuid);
                }
                qPos.add(groupId);
                List<CommerceNotificationAttachment> list = q.list();
                if (list.isEmpty()) {
                    // Cache the empty list so the next lookup can answer "not found"
                    // without hitting the database.
                    finderCache.putResult(FINDER_PATH_FETCH_BY_UUID_G, finderArgs, list);
                } else {
                    CommerceNotificationAttachment commerceNotificationAttachment = list.get(0);
                    result = commerceNotificationAttachment;
                    cacheResult(commerceNotificationAttachment);
                }
            } catch (Exception e) {
                finderCache.removeResult(FINDER_PATH_FETCH_BY_UUID_G, finderArgs);
                throw processException(e);
            } finally {
                closeSession(session);
            }
        }
        // A cached empty List marks a known "no match"; anything else is the entity.
        if (result instanceof List<?>) {
            return null;
        } else {
            return (CommerceNotificationAttachment) result;
        }
    }
}
public class TypedArrayCompat {

    /**
     * Retrieve the Drawable for the attribute at <var>index</var>.
     *
     * @param theme theme used to resolve attribute references (may be null)
     * @param a typed array used as fallback (may be null)
     * @param values raw typed values backing the array (may be null)
     * @param index index of attribute to retrieve
     * @return Drawable for the attribute, or null if not defined
     */
    public static Drawable getDrawable(Resources.Theme theme, TypedArray a, TypedValue[] values, int index) {
        if (values != null && theme != null) {
            TypedValue v = values[index];
            // An attribute reference must be resolved against the theme first.
            if (v.type == TypedValue.TYPE_ATTRIBUTE) {
                TEMP_ARRAY[0] = v.data;
                TypedArray tmp = theme.obtainStyledAttributes(null, TEMP_ARRAY, 0, 0);
                try {
                    return tmp.getDrawable(0);
                } finally {
                    // Always recycle the temporary TypedArray.
                    tmp.recycle();
                }
            }
        }
        // Fall back to the compat lookup on the typed array itself.
        if (a != null) {
            return LollipopDrawablesCompat.getDrawable(a, index, theme);
        }
        return null;
    }
}
public class CPInstancePersistenceImpl { /** * Returns the cp instance with the primary key or throws a { @ link com . liferay . portal . kernel . exception . NoSuchModelException } if it could not be found .
* @ param primaryKey the primary key of the cp instance
* @ return the cp instance
* @ throws NoSuchCPInstanceException if a cp instance with the primary key could not be found */
@ Override public CPInstance findByPrimaryKey ( Serializable primaryKey ) throws NoSuchCPInstanceException { } } | CPInstance cpInstance = fetchByPrimaryKey ( primaryKey ) ; if ( cpInstance == null ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } throw new NoSuchCPInstanceException ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } return cpInstance ; |
public class ReviewsImpl {

    /**
     * Creates reviews that show up for Reviewers on your team. As Reviewers complete
     * reviewing, results of the Review are POSTed (i.e. HTTP POST) to the specified
     * CallBackEndpoint.
     *
     * <h3>CallBack Schemas</h3>
     * <h4>Review Completion CallBack Sample</h4>
     * <pre>
     * {
     *   "ReviewId": "&lt;Review Id&gt;",
     *   "ModifiedOn": "2016-10-11T22:36:32.9934851Z",
     *   "ModifiedBy": "&lt;Name of the Reviewer&gt;",
     *   "CallBackType": "Review",
     *   "ContentId": "&lt;The ContentId that was specified input&gt;",
     *   "Metadata": {
     *     "adultscore": "0.xxx",
     *     "a": "False",
     *     "racyscore": "0.xxx",
     *     "r": "True"
     *   },
     *   "ReviewerResultTags": {
     *     "a": "False",
     *     "r": "True"
     *   }
     * }
     * </pre>
     *
     * @param teamName your team name
     * @param urlContentType the content type
     * @param createReviewBody body for the create reviews API
     * @param createReviewsOptionalParameter the object representing the optional parameters
     *        to be set before calling this API
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws APIErrorException thrown if the request is rejected by the server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the List&lt;String&gt; object if successful
     */
    public List<String> createReviews(String teamName, String urlContentType, List<CreateReviewBodyItem> createReviewBody, CreateReviewsOptionalParameter createReviewsOptionalParameter) {
        // Synchronous wrapper: run the async service call and block for the single response body.
        return createReviewsWithServiceResponseAsync(teamName, urlContentType, createReviewBody, createReviewsOptionalParameter).toBlocking().single().body();
    }
}
public class CPDefinitionVirtualSettingPersistenceImpl { /** * Returns the cp definition virtual setting where uuid = & # 63 ; and groupId = & # 63 ; or throws a { @ link NoSuchCPDefinitionVirtualSettingException } if it could not be found .
* @ param uuid the uuid
* @ param groupId the group ID
* @ return the matching cp definition virtual setting
* @ throws NoSuchCPDefinitionVirtualSettingException if a matching cp definition virtual setting could not be found */
@ Override public CPDefinitionVirtualSetting findByUUID_G ( String uuid , long groupId ) throws NoSuchCPDefinitionVirtualSettingException { } } | CPDefinitionVirtualSetting cpDefinitionVirtualSetting = fetchByUUID_G ( uuid , groupId ) ; if ( cpDefinitionVirtualSetting == null ) { StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "uuid=" ) ; msg . append ( uuid ) ; msg . append ( ", groupId=" ) ; msg . append ( groupId ) ; msg . append ( "}" ) ; if ( _log . isDebugEnabled ( ) ) { _log . debug ( msg . toString ( ) ) ; } throw new NoSuchCPDefinitionVirtualSettingException ( msg . toString ( ) ) ; } return cpDefinitionVirtualSetting ; |
public class SimulationJob {

    /**
     * A map that contains tag keys and tag values that are attached to the simulation job.
     *
     * @param tags a map that contains tag keys and tag values that are attached to the
     *        simulation job
     * @return returns a reference to this object so that method calls can be chained together
     */
    public SimulationJob withTags(java.util.Map<String, String> tags) {
        // Fluent variant of setTags: store the map and return this for chaining.
        setTags(tags);
        return this;
    }
}
public class FldExporter {

    /**
     * Writes the engine into the given writer.
     *
     * @param engine is the engine to export
     * @param writer is the output where the engine will be written to
     * @param reader is the reader of a set of lines containing space-separated input values
     * @throws IOException if any error occurs upon writing to the writer
     */
    public void write(Engine engine, Writer writer, Reader reader) throws IOException {
        if (exportHeaders) {
            writer.append(header(engine)).append("\n");
        }
        String line;
        int lineNumber = 0;
        BufferedReader bufferedReader = new BufferedReader(reader);
        try {
            while ((line = bufferedReader.readLine()) != null) {
                ++lineNumber;
                List<Double> inputValues;
                if (lineNumber == 1) {
                    // automatic detection of header: if the first line does not parse as
                    // numeric values it is treated as a header row and skipped.
                    try {
                        inputValues = parse(line);
                    } catch (Exception ex) {
                        continue;
                    }
                } else {
                    inputValues = parse(line);
                }
                write(engine, writer, inputValues, engine.getInputVariables());
            }
        } catch (RuntimeException ex) {
            throw ex;
        } catch (IOException ex) {
            throw ex;
        } finally {
            // Closing the wrapper also closes the caller's reader.
            bufferedReader.close();
        }
    }
}
public class A_CmsListResourceCollector { /** * Wrapper method for caching the result of { @ link # getResources ( CmsObject , Map ) } . < p >
* @ param cms the cms object
* @ param params the parameter map
* @ return the result of { @ link # getResources ( CmsObject , Map ) }
* @ throws CmsException if something goes wrong */
protected List < CmsResource > getInternalResources ( CmsObject cms , Map < String , String > params ) throws CmsException { } } | synchronized ( this ) { if ( m_resources == null ) { m_resources = getResources ( cms , params ) ; Iterator < CmsResource > it = m_resources . iterator ( ) ; while ( it . hasNext ( ) ) { CmsResource resource = it . next ( ) ; m_resCache . put ( resource . getStructureId ( ) . toString ( ) , resource ) ; } } } return m_resources ; |
public class ClientSharedObject {

  /**
   * Disconnects the shared object from the server, if currently connected.
   *
   * <p>Sends a SERVER_DISCONNECT shared-object event over the connection's
   * channel 3 (presumably the RTMP command channel — confirm against the
   * connection implementation), notifies local listeners via
   * {@code notifyDisconnect()}, and clears the initial-sync flag.
   */
  public void disconnect() {
    if (isConnected()) {
      SharedObjectMessage msg = new SharedObjectMessage(name, 0, isPersistent());
      msg.addEvent(new SharedObjectEvent(Type.SERVER_DISCONNECT, null, null));
      Channel c = ((RTMPConnection) source).getChannel(3);
      c.write(msg);
      notifyDisconnect();
      initialSyncReceived = false;
    }
  }
}
public class CloudResourceBundleControl { /** * Create an instance of < code > CloudResourceBundleControl < / code > with the specified
* service account , cache expiration , bundle inclusion / exclusion name pattern , the custom
* bundle name mapper and bundle lookup mode .
* The cache expiration time is in milliseconds
* and must be positive except for two special values .
* < ul >
* < li > { @ link Control # TTL _ DONT _ CACHE } to disable resource bundle cache < / li >
* < li > { @ link Control # TTL _ NO _ EXPIRATION _ CONTROL } to disable resource bundle cache expiration < / li >
* < / ul >
* @ param serviceAccount The service account .
* @ param cacheExpiration The cache expiration , see the method description for details .
* @ param inclusionPattern The regular expression pattern string for specifying resource bundle
* names to be included , or null .
* @ param exclusionPattern The regular expression pattern string for specifying resource bundle
* package names to be excluded in addition to
* { @ link # GP _ RB _ DEFAULT _ EXCLUSION _ PATTERN _ STRING } , or null .
* @ param nameMapper The custom base name to bundle ID mapper , or null if no mapping is necessary .
* @ param mode The resource bundle lookup mode . If null , and the environment variable
* { @ link # GP _ LOOKUP _ MODE } is not set , { @ link LookupMode # REMOTE _ THEN _ LOCAL
* REMOTE _ THEN _ LOCAL } is used .
* @ return An instance of CloudResourceBundleControl .
* @ throws IllegalArgumentException when < code > serviceAccount < / code > is null ,
* or < code > cacheExpiration < / code > value is illegal ,
* or < code > inclusionPattern < / code > / < code > exclusionPattern < / code > syntax is invalid . */
public static CloudResourceBundleControl getInstance ( ServiceAccount serviceAccount , long cacheExpiration , String inclusionPattern , String exclusionPattern , NameMapper nameMapper , LookupMode mode ) { } } | if ( serviceAccount == null ) { throw new IllegalArgumentException ( "serviceAccount is null" ) ; } if ( cacheExpiration < 0 && cacheExpiration != Control . TTL_DONT_CACHE && cacheExpiration != Control . TTL_NO_EXPIRATION_CONTROL ) { throw new IllegalArgumentException ( "Illegal cacheExpiration: " + cacheExpiration ) ; } Pattern incPat = null ; Pattern excPat = null ; if ( inclusionPattern != null ) { try { incPat = Pattern . compile ( inclusionPattern ) ; } catch ( PatternSyntaxException e ) { throw new IllegalArgumentException ( "Illegal inclusionPattern: " + inclusionPattern , e ) ; } } if ( exclusionPattern != null ) { try { excPat = Pattern . compile ( exclusionPattern ) ; } catch ( PatternSyntaxException e ) { throw new IllegalArgumentException ( "Illegal exclusionPattern: " + exclusionPattern , e ) ; } } return new CloudResourceBundleControl ( serviceAccount , mode , cacheExpiration , incPat , excPat , nameMapper ) ; |
public class ProtocolBufferClassHierarchy { /** * Deserialize a class hierarchy from a file . The file can be generated from either Java or C #
* @ param file
* @ return
* @ throws IOException
* @ deprecated in 0.12 . Use AvroClassHierarchySerializer instead */
public static ClassHierarchy deserialize ( final File file ) throws IOException { } } | try ( final InputStream stream = new FileInputStream ( file ) ) { final ClassHierarchyProto . Node root = ClassHierarchyProto . Node . parseFrom ( stream ) ; return new ProtocolBufferClassHierarchy ( root ) ; } |
public class FontChooser { /** * Set the size of the selected font .
* @ param size the size of the selected font
* @ see # getSelectedFontSize */
public FontChooser setSelectedFontSize ( int size ) { } } | String sizeString = String . valueOf ( size ) ; for ( int i = 0 ; i < this . fontSizeStrings . length ; i ++ ) { if ( this . fontSizeStrings [ i ] . equals ( sizeString ) ) { getFontSizeList ( ) . setSelectedIndex ( i ) ; break ; } } getFontSizeTextField ( ) . setText ( sizeString ) ; updateSampleFont ( ) ; return this ; |
public class JavacState {

  /**
   * Recursively deletes the contents of a directory: files and (recursively)
   * subdirectories. The directory itself is not deleted. Does nothing when
   * {@code dir} is null or does not exist.
   */
  private static void deleteContents(File dir) {
    if (dir == null || !dir.exists()) {
      return;
    }
    // listFiles() returns null on I/O error or when dir is not a directory;
    // the original would NPE in that case.
    File[] children = dir.listFiles();
    if (children == null) {
      return;
    }
    for (File f : children) {
      if (f.isDirectory()) {
        deleteContents(f);
      }
      f.delete();
    }
  }
}
public class SpliteratorBasedStream {

  /**
   * A potentially asynchronous flatMap operation where data from each publisher
   * may arrive out of order. Delegates to the two-argument overload with a
   * fixed value of 256 (presumably the maximum concurrency — confirm against
   * that overload's contract).
   *
   * @param mapper function mapping each element to a publisher of results
   * @return a stream of the merged results
   */
  public <R> ReactiveSeq<R> mergeMap(
      final Function<? super T, ? extends Publisher<? extends R>> mapper) {
    return mergeMap(256, mapper);
  }
}
public class DBIDUtil {

  /**
   * Draws a single random sample from the given IDs. Obtains a single-threaded
   * random generator from the factory and delegates to the Random-based overload.
   *
   * @param ids IDs to draw from
   * @param random random factory
   * @return random ID
   */
  public static DBIDVar randomSample(DBIDs ids, RandomFactory random) {
    return randomSample(ids, random.getSingleThreadedRandom());
  }
}
public class PhaseApplication { /** * Sends { @ code " = = = < header > = = = " } to { @ link Reportable # output ( String . . . )
* output }
* @ param header Non - null header
* @ param phase Non - null phase
* @ param stage Non - null stage */
protected void beginStage ( final String header , final String stage , final String numStages ) { } } | boolean print = getPhaseConfiguration ( ) . isVerbose ( ) ; if ( print ) { StringBuilder bldr = new StringBuilder ( ) ; bldr . append ( INDENT ) ; bldr . append ( "Stage " ) ; bldr . append ( stage ) ; bldr . append ( " of " ) ; bldr . append ( numStages ) ; bldr . append ( ": " ) ; bldr . append ( header ) ; reportable . output ( bldr . toString ( ) ) ; } |
public class GoldenSearch {

  /**
   * Finds a local minimum of the function {@code f} on [a, b] using
   * golden-section search.
   *
   * @param eps the desired accuracy of the result
   * @param maxIterations the maximum number of iterations to perform
   * @param a the left bound on the minimum
   * @param b the right bound on the minimum
   * @param f the function to find the minimum of
   * @return the abscissa approximately minimizing {@code f} (midpoint of the final bracket)
   */
  public static double minimize(double eps, int maxIterations, double a, double b, Function1D f) {
    // Normalize bound order.
    if (a > b) {
      double tmp = b;
      b = a;
      a = tmp;
    }
    // Initial probe points at the golden-ratio splits of [a, b].
    // (tau and om_tau are class constants; presumably tau ~ 0.618 and
    // om_tau = 1 - tau — confirm against their declarations.)
    int iter = 0;
    double x1 = a + om_tau * (b - a);
    double f1 = f.f(x1);
    double x2 = a + tau * (b - a);
    double f2 = f.f(x2);
    // Shrink the bracket until it is narrower than 2*eps or the iteration cap
    // is reached; each step reuses one previous function evaluation.
    while (b - a > 2 * eps && iter < maxIterations) {
      if (f1 > f2) {
        // Minimum lies in [x1, b]: discard left segment, x2 becomes new x1.
        a = x1;
        x1 = x2;
        f1 = f2;
        x2 = a + tau * (b - a);
        f2 = f.f(x2);
      } else { // f1 <= f2
        // Minimum lies in [a, x2]: discard right segment, x1 becomes new x2.
        b = x2;
        x2 = x1;
        f2 = f1;
        x1 = a + om_tau * (b - a);
        f1 = f.f(x1);
      }
      iter++;
    }
    return (a + b) / 2.0;
  }
}
public class PortletAdministrationHelper { /** * updates the editPortlet form with the portletType of the first ( and only ) portletDefinition
* passed in through the Map of portlet definitions .
* @ param portletDefinitions
* @ param form
* @ return PortletPublishingDefinition of the first portlet definition in the list , null if the
* list is empty or has more than one element . */
public PortletPublishingDefinition updateFormForSinglePortletType ( Map < IPortletType , PortletPublishingDefinition > portletDefinitions , PortletDefinitionForm form ) { } } | if ( portletDefinitions . size ( ) != 1 ) { return null ; } IPortletType portletType = portletDefinitions . keySet ( ) . iterator ( ) . next ( ) ; form . setTypeId ( portletType . getId ( ) ) ; PortletPublishingDefinition cpd = portletPublishingDefinitionDao . getChannelPublishingDefinition ( portletType . getId ( ) ) ; form . setChannelPublishingDefinition ( cpd ) ; return cpd ; |
public class TldTracker {

  /**
   * Selects the scales for the image pyramid based on image size and minimum
   * feature size: returns successive powers of two {1, 2, 4, ...} such that
   * doubling the last scale would reach or exceed max(width, height) / minSize.
   *
   * @param imageWidth image width in pixels
   * @param imageHeight image height in pixels
   * @param minSize minimum feature size
   * @return scales for the image pyramid
   */
  public static int[] selectPyramidScale(int imageWidth, int imageHeight, int minSize) {
    int largestSide = Math.max(imageWidth, imageHeight);
    int maxScale = largestSide / minSize;
    // Count how many power-of-two scales fit under maxScale (at least one).
    int numScales = 1;
    for (int s = 1; s * 2 < maxScale; s *= 2) {
      numScales++;
    }
    int[] scales = new int[numScales];
    int scale = 1;
    for (int i = 0; i < numScales; i++, scale *= 2) {
      scales[i] = scale;
    }
    return scales;
  }
}
public class CSTNode { /** * Returns the first matching meaning of the specified types .
* Returns Types . UNKNOWN if there are no matches . */
public int getMeaningAs ( int [ ] types ) { } } | for ( int i = 0 ; i < types . length ; i ++ ) { if ( isA ( types [ i ] ) ) { return types [ i ] ; } } return Types . UNKNOWN ; |
public class ReduceOps {

  /**
   * Constructs a {@code TerminalOp} that implements a functional reduce on
   * reference values producing an optional reference result.
   *
   * @param <T> the type of the input elements, and the type of the result
   * @param operator the reducing function
   * @return a {@code TerminalOp} implementing the reduction
   */
  public static <T> TerminalOp<T, Optional<T>> makeRef(BinaryOperator<T> operator) {
    Objects.requireNonNull(operator);
    // Sink that folds elements with the operator, tracking whether any element
    // was seen so an empty upstream yields Optional.empty().
    class ReducingSink implements AccumulatingSink<T, Optional<T>, ReducingSink> {
      private boolean empty;
      private T state;

      public void begin(long size) {
        // Reset per-evaluation state; the sink may be reused.
        empty = true;
        state = null;
      }

      @Override
      public void accept(T t) {
        if (empty) {
          // First element becomes the initial accumulation state.
          empty = false;
          state = t;
        } else {
          state = operator.apply(state, t);
        }
      }

      @Override
      public Optional<T> get() {
        return empty ? Optional.empty() : Optional.of(state);
      }

      @Override
      public void combine(ReducingSink other) {
        // Merging an empty sink is a no-op; otherwise fold in its state.
        if (!other.empty)
          accept(other.state);
      }
    }
    return new ReduceOp<T, Optional<T>, ReducingSink>(StreamShape.REFERENCE) {
      @Override
      public ReducingSink makeSink() {
        return new ReducingSink();
      }
    };
  }
}
public class CommerceTierPriceEntryPersistenceImpl { /** * Removes all the commerce tier price entries where groupId = & # 63 ; from the database .
* @ param groupId the group ID */
@ Override public void removeByGroupId ( long groupId ) { } } | for ( CommerceTierPriceEntry commerceTierPriceEntry : findByGroupId ( groupId , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ) { remove ( commerceTierPriceEntry ) ; } |
public class PartialVAFile { /** * Calculate selectivity coefficients .
* @ param daFiles List of files to use
* @ param query Query vector
* @ param epsilon Epsilon radius */
protected static void calculateSelectivityCoeffs ( List < DoubleObjPair < DAFile > > daFiles , NumberVector query , double epsilon ) { } } | final int dimensions = query . getDimensionality ( ) ; double [ ] lowerVals = new double [ dimensions ] ; double [ ] upperVals = new double [ dimensions ] ; VectorApproximation queryApprox = calculatePartialApproximation ( null , query , daFiles ) ; for ( int i = 0 ; i < dimensions ; i ++ ) { final double val = query . doubleValue ( i ) ; lowerVals [ i ] = val - epsilon ; upperVals [ i ] = val + epsilon ; } DoubleVector lowerEpsilon = DoubleVector . wrap ( lowerVals ) ; VectorApproximation lowerEpsilonPartitions = calculatePartialApproximation ( null , lowerEpsilon , daFiles ) ; DoubleVector upperEpsilon = DoubleVector . wrap ( upperVals ) ; VectorApproximation upperEpsilonPartitions = calculatePartialApproximation ( null , upperEpsilon , daFiles ) ; for ( int i = 0 ; i < daFiles . size ( ) ; i ++ ) { int coeff = ( queryApprox . getApproximation ( i ) - lowerEpsilonPartitions . getApproximation ( i ) ) + ( upperEpsilonPartitions . getApproximation ( i ) - queryApprox . getApproximation ( i ) ) + 1 ; daFiles . get ( i ) . first = coeff ; } |
public class DominatorTree { /** * debugging */
private void printObjs ( List < Long > changedIds , List < Long > oldDomIds , List < Long > newDomIds , List < Boolean > addedByDirtySet , List < Long > changedIdx ) { } } | if ( changedIds . size ( ) > 20 ) return ; TreeMap < Integer , String > m = new TreeMap ( ) ; for ( int i = 0 ; i < changedIds . size ( ) ; i ++ ) { Long iid = changedIds . get ( i ) ; Long oldDom = oldDomIds . get ( i ) ; Long newDom = newDomIds . get ( i ) ; Long index = changedIdx . get ( i ) ; Boolean addedByDirt = addedByDirtySet . get ( i ) ; Instance ii = heap . getInstanceByID ( iid . longValue ( ) ) ; int number = ii . getInstanceNumber ( ) ; String text = "Index: " + index + ( addedByDirt ? " New " : " Old " ) + printInstance ( iid ) ; text += " OldDom " + printInstance ( oldDom ) ; text += " NewDom: " + printInstance ( newDom ) ; m . put ( number , text ) ; } for ( Integer in : m . keySet ( ) ) { System . out . println ( m . get ( in ) ) ; } |
public class ResultCache { /** * Remove all cache entries between firstPara ( included ) and lastPara ( included )
* shift all numberOfParagraph by ' shift ' */
void removeAndShift ( int firstPara , int lastPara , int shift ) { } } | for ( int i = 0 ; i < entries . size ( ) ; i ++ ) { if ( entries . get ( i ) . numberOfParagraph >= firstPara && entries . get ( i ) . numberOfParagraph <= lastPara ) { entries . remove ( i ) ; i -- ; } } for ( CacheEntry anEntry : entries ) { if ( anEntry . numberOfParagraph > lastPara ) { anEntry . numberOfParagraph += shift ; } } |
public class Parser {

  /**
   * Tests whether the next token is a word (all letters). On success the
   * matched word is stored as the provider's last token.
   *
   * @param textProvider the text source / cursor
   * @return true if matched
   */
  public boolean alpha(TextProvider textProvider) {
    clearLastToken(textProvider);
    clearLeadingSpaces(textProvider);
    mark(textProvider);
    if (m_debug)
      debug("testing", textProvider);
    StringBuilder sb = new StringBuilder();
    // Consume characters while they are letters. remark() presumably re-saves
    // the cursor after each accepted character — confirm against its contract.
    while (true) {
      char c = getNextChar(textProvider);
      if (!Character.isLetter(c))
        break;
      remark(textProvider);
      sb.append(c);
    }
    reset(textProvider); // removes last char (the non-letter that ended the scan)
    String s = sb.toString().trim();
    if (s.length() == 0)
      return false;
    textProvider.setLastToken(s);
    debug(textProvider);
    return true;
  }
}
public class InheritanceHelper { /** * Replies if the type candidate is a subtype of the given super type .
* @ param candidate the type to test .
* @ param jvmSuperType the expected JVM super - type .
* @ param sarlSuperType the expected SARL super - type .
* @ return < code > true < / code > if the candidate is a sub - type of the super - type . */
public boolean isSubTypeOf ( JvmTypeReference candidate , Class < ? > jvmSuperType , Class < ? extends XtendTypeDeclaration > sarlSuperType ) { } } | final LightweightTypeReference reference = Utils . toLightweightTypeReference ( candidate , this . services ) ; return isSubTypeOf ( reference , jvmSuperType , sarlSuperType ) ; |
public class Do { /** * < div color = ' red ' style = " font - size : 24px ; color : red " > < b > < i > < u > JCYPHER < / u > < / i > < / b > < / div >
* < div color = ' red ' style = " font - size : 18px ; color : red " > < i > detach all relations and then delete a node in the DO part of a FOREACH expression < / i > < / div >
* < div color = ' red ' style = " font - size : 18px ; color : red " > < i > e . g . . . . < b > DETACH _ DELETE ( n ) < / b > < / i > < / div >
* < br / > */
public DoConcat DETACH_DELETE ( JcNode node ) { } } | ModifyTerminal mt = ModifyFactory . deleteElement ( node ) ; ASTNode clause = APIObjectAccess . getAstNode ( mt ) ; clause . setClauseType ( ClauseType . DETACH_DELETE ) ; return createConcat ( clause ) ; |
public class OAuth2JwtAccessTokenConverter { /** * Fetch a new public key from the AuthorizationServer .
* @ return true , if we could fetch it ; false , if we could not . */
private boolean tryCreateSignatureVerifier ( ) { } } | long t = System . currentTimeMillis ( ) ; if ( t - lastKeyFetchTimestamp < oAuth2Properties . getSignatureVerification ( ) . getPublicKeyRefreshRateLimit ( ) ) { return false ; } try { SignatureVerifier verifier = signatureVerifierClient . getSignatureVerifier ( ) ; if ( verifier != null ) { setVerifier ( verifier ) ; lastKeyFetchTimestamp = t ; log . debug ( "Public key retrieved from OAuth2 server to create SignatureVerifier" ) ; return true ; } } catch ( Throwable ex ) { log . error ( "could not get public key from OAuth2 server to create SignatureVerifier" , ex ) ; } return false ; |
public class RegisterMasterPRequest { /** * < code > optional . alluxio . grpc . meta . RegisterMasterPOptions options = 2 ; < / code > */
public alluxio . grpc . RegisterMasterPOptions getOptions ( ) { } } | return options_ == null ? alluxio . grpc . RegisterMasterPOptions . getDefaultInstance ( ) : options_ ; |
public class CUarray_format { /** * Returns the String identifying the given CUarray _ format
* @ param n The CUarray _ format
* @ return The String identifying the given CUarray _ format */
public static String stringFor ( int n ) { } } | switch ( n ) { case CU_AD_FORMAT_UNSIGNED_INT8 : return "CU_AD_FORMAT_UNSIGNED_INT8" ; case CU_AD_FORMAT_UNSIGNED_INT16 : return "CU_AD_FORMAT_UNSIGNED_INT16" ; case CU_AD_FORMAT_UNSIGNED_INT32 : return "CU_AD_FORMAT_UNSIGNED_INT32" ; case CU_AD_FORMAT_SIGNED_INT8 : return "CU_AD_FORMAT_SIGNED_INT8" ; case CU_AD_FORMAT_SIGNED_INT16 : return "CU_AD_FORMAT_SIGNED_INT16" ; case CU_AD_FORMAT_SIGNED_INT32 : return "CU_AD_FORMAT_SIGNED_INT32" ; case CU_AD_FORMAT_HALF : return "CU_AD_FORMAT_HALF" ; case CU_AD_FORMAT_FLOAT : return "CU_AD_FORMAT_FLOAT" ; } return "INVALID CUarray_format: " + n ; |
public class XMemcachedClient { /** * Delete key ' s data item from memcached . This method doesn ' t wait for reply
* @ param key
* @ param time
* @ throws InterruptedException
* @ throws MemcachedException */
public final void deleteWithNoReply ( final String key , final int time ) throws InterruptedException , MemcachedException { } } | try { this . delete0 ( key , time , 0 , true , this . opTimeout ) ; } catch ( TimeoutException e ) { throw new MemcachedException ( e ) ; } |
public class Client {

  /**
   * (non-Javadoc)
   * @see org.restcomm.protocols.ss7.map.api.service.supplementary.MAPServiceSupplementaryListener
   * #onUnstructuredSSResponseIndication(org.mobicents.protocols.ss7.map.api.service
   * .supplementary.UnstructuredSSResponseIndication)
   */
  @Override
  public void onUnstructuredSSResponse(UnstructuredSSResponse unstrResInd) {
    // Error condition: the client should never receive an
    // UnstructuredSSResponseIndication, so log it as an error.
    logger.error(String.format(
        "onUnstructuredSSResponseIndication for Dialog=%d and invokeId=%d",
        unstrResInd.getMAPDialog().getLocalDialogId(), unstrResInd.getInvokeId()));
  }
}
public class EigenvalueDecomposition {

  /**
   * Nonsymmetric reduction from Hessenberg to real Schur form.
   *
   * <p>Derived from the Algol procedure hqr2, by Martin and Wilkinson, Handbook
   * for Auto. Comp., Vol. ii - Linear Algebra, and the corresponding Fortran
   * subroutine in EISPACK. Reads/writes the fields H (Hessenberg matrix),
   * V (transformations), d/e (real/imaginary eigenvalue parts).
   */
  private void hqr2() {
    // FIXME: does this fail on NaN / inf values?
    // Initialize. low/high are fixed here; comments below note what would
    // change if they were ever made configurable.
    final int nn = this.n, low = 0, high = nn - 1;
    // Store roots isolated by balanc and compute matrix norm
    double norm = 0.;
    for (int i = 0; i < nn; i++) {
      // If eventually allowing low, high to be set, use this:
      // if (i < low || i > high) { d[i] = H[i][i]; e[i] = 0.; }
      for (int j = (i > 0 ? i - 1 : 0); j < nn; j++) {
        norm += abs(H[i][j]);
      }
    }
    // Outer loop over eigenvalue index
    {
      double exshift = 0.0;
      int iter = 0;
      for (int n = nn - 1; n >= low;) {
        // Look for single small sub-diagonal element
        int l = n;
        for (; l > low; --l) {
          double s = abs(H[l - 1][l - 1]) + abs(H[l][l]);
          // 0x1P-52 is the double machine epsilon.
          if (abs(H[l][l - 1]) < 0x1P-52 * (s == 0. ? norm : s)) {
            break;
          }
        }
        // Check for convergence
        if (l == n) {
          // One root found
          d[n] = (H[n][n] += exshift);
          e[n] = 0.;
          n--;
          iter = 0;
          continue;
        }
        final double[] Hn = H[n], Hnm1 = H[n - 1];
        if (l == n - 1) {
          // Two roots found
          double w = Hn[n - 1] * Hnm1[n];
          double p = (Hnm1[n - 1] - Hn[n]) * 0.5, q = p * p + w;
          double x = (Hn[n] += exshift), z = FastMath.sqrt(abs(q));
          Hnm1[n - 1] += exshift;
          if (q >= 0) {
            // Real pair
            z = (p >= 0) ? p + z : p - z;
            d[n - 1] = x + z;
            d[n] = (z != 0.) ? x - w / z : d[n - 1];
            e[n] = e[n - 1] = 0.;
            x = Hn[n - 1];
            double s = abs(x) + abs(z);
            p = x / s;
            q = z / s;
            double r = FastMath.hypot(p, q);
            p /= r;
            q /= r;
            // Row modification
            for (int j = n - 1; j < nn; j++) {
              double tmp = Hnm1[j];
              Hnm1[j] = q * tmp + p * Hn[j];
              Hn[j] = q * Hn[j] - p * tmp;
            }
            // Column modification
            for (int i = 0; i <= n; i++) {
              modifyQP(H[i], n, p, q);
            }
            // Accumulate transformations
            for (int i = low; i <= high; i++) {
              modifyQP(V[i], n, p, q);
            }
          } else {
            // Complex pair
            d[n] = d[n - 1] = x + p;
            e[n] = -(e[n - 1] = z);
          }
          n -= 2;
          iter = 0;
          continue;
        }
        // No convergence yet
        // Form shift
        double x = Hn[n], y = 0., w = 0.;
        y = Hnm1[n - 1];
        w = Hn[n - 1] * Hnm1[n];
        // Wilkinson's original ad hoc shift
        if (iter == 10) {
          exshift += x;
          for (int i = low; i <= n; i++) {
            H[i][i] -= x;
          }
          double s = abs(Hn[n - 1]) + abs(Hnm1[n - 2]);
          x = y = 0.75 * s;
          w = -0.4375 * s * s;
        }
        // MATLAB's new ad hoc shift
        if (iter == 30) {
          double s = (y - x) * 0.5;
          s = s * s + w;
          if (s > 0) {
            s = FastMath.sqrt(s);
            s = (y < x) ? -s : s;
            s = x - w / ((y - x) * 0.5 + s);
            for (int i = low; i <= n; i++) {
              H[i][i] -= s;
            }
            exshift += s;
            x = y = w = 0.964;
          }
        }
        ++iter; // (Could check iteration count here.)
        double p = 0., q = 0., r = 0.;
        // Look for two consecutive small sub-diagonal elements
        int m = n - 2;
        for (; m >= l; m--) {
          final double[] Hm = H[m], Hmp1 = H[m + 1];
          final double z = Hm[m], xz = x - z;
          p = (xz * (y - z) - w) / Hmp1[m] + Hm[m + 1];
          q = Hmp1[m + 1] - xz - y;
          r = H[m + 2][m + 1];
          double tmp = abs(p) + abs(q) + abs(r);
          p /= tmp;
          q /= tmp;
          r /= tmp;
          if (m == l || abs(Hm[m - 1]) * (abs(q) + abs(r)) < 0x1P-52 * (abs(p) * (abs(H[m - 1][m - 1]) + abs(z) + abs(Hmp1[m + 1])))) {
            break;
          }
        }
        // Zero out entries below the bulge before the QR step.
        for (int i = m + 2; i <= n; i++) {
          final double[] Hi = H[i];
          Hi[i - 2] = 0.;
          if (i > m + 2) {
            Hi[i - 3] = 0.;
          }
        }
        // Double QR step involving rows l:n and columns m:n
        for (int k = m, last = n - 1; k <= last; k++) {
          boolean notlast = (k != last);
          final double[] Hk = H[k], Hkp1 = H[k + 1], Hkp2 = notlast ? H[k + 2] : null;
          if (k != m) {
            p = Hk[k - 1];
            q = Hkp1[k - 1];
            r = notlast ? Hkp2[k - 1] : 0.;
            x = abs(p) + abs(q) + abs(r);
            if (x == 0.0) {
              continue; // Jama 1.0.3 fix
            }
            p /= x;
            q /= x;
            r /= x;
          }
          double s = FastMath.hypot(p, q, r);
          s = (p < 0) ? -s : s;
          if (s != 0) {
            if (k != m) {
              Hk[k - 1] = -s * x;
            } else if (l != m) {
              Hk[k - 1] = -Hk[k - 1];
            }
            p += s;
            x = p / s;
            y = q / s;
            double z = r / s;
            q /= p;
            r /= p;
            // Row modification
            for (int j = k; j < nn; j++) {
              double tmp = Hk[j] + q * Hkp1[j];
              if (notlast) {
                tmp += r * Hkp2[j];
                Hkp2[j] -= tmp * z;
              }
              Hk[j] -= tmp * x;
              Hkp1[j] -= tmp * y;
            }
            // Column modification
            for (int i = 0; i <= Math.min(n, k + 3); i++) {
              modifyQR(H[i], k, notlast, q, r, x, y, z);
            }
            // Accumulate transformations
            for (int i = low; i <= high; i++) {
              modifyQR(V[i], k, notlast, q, r, x, y, z);
            }
          } // (s != 0)
        } // k loop
        // check convergence
      } // while (n >= low)
    }
    if (norm == 0.0) {
      return;
    }
    // Backsubstitute to find vectors of upper triangular form
    for (int n = nn - 1; n >= 0; n--) {
      double p = d[n], q = e[n];
      if (q == 0) {
        hqr2BacksubstituteReal(n, p, norm);
      } else if (q < 0) {
        hqr2BacksubstituteComplex(n, p, q, norm);
      }
    }
    // Vectors of isolated roots
    // Only matters if the user can modify low, high:
    // for (int i = 0; i < nn; i++) {
    //   if (i < low || i > high) { System.arraycopy(H[i], i, V[i], i, nn - i); }
    // Back transformation to get eigenvectors of original matrix
    hqr2BackTransformation(nn, low, high);
  }
}
public class Negotiation { /** * ' Accept ' based negotiation .
* This method determines the result to send to the client based on the ' Accept ' header of the request .
* The returned result is enhanced with the ' Vary ' header set to { @ literal Accept } .
* This methods retrieves the accepted media type in their preference order and check ,
* one - by - one if the given results match one of them . So , it ensures we get the most acceptable result .
* @ param results the set of result structured as follows : mime - type - > result . The mime - type ( keys ) must be
* valid mime type such as ' application / json ' or ' text / html ' .
* @ return the selected result , or a result with the status { @ link org . wisdom . api . http . Status # NOT _ ACCEPTABLE } if
* none of the given results match the request . */
public static Result accept ( Map < String , ? extends Result > results ) { } } | Context context = Context . CONTEXT . get ( ) ; if ( context == null ) { throw new IllegalStateException ( "Negotiation cannot be achieved outside of a request" ) ; } Collection < MediaType > accepted = context . request ( ) . mediaTypes ( ) ; // accepted cannot be empty , if the header is missing text / * is added .
for ( MediaType media : accepted ) { // Do we have a matching key .
for ( Map . Entry < String , ? extends Result > entry : results . entrySet ( ) ) { MediaType input = MediaType . parse ( entry . getKey ( ) ) ; if ( input . is ( media ) ) { return entry . getValue ( ) . with ( HeaderNames . VARY , HeaderNames . ACCEPT ) ; } } } return Results . status ( Status . NOT_ACCEPTABLE ) ; |
public class ResourceLimiter {

  /**
   * Enables an experimental feature that throttles requests made from
   * {@link BulkMutation}. The logic is as follows:
   * <ul>
   * <li>To start: reduce parallelism to 25% — the parallelism is high to begin
   * with; this reduction should reduce the impact of a bursty job, such as
   * those found in Dataflow.
   * <li>Every 20 seconds: if rpc latency is above the threshold, decrease
   * parallelism by 10% of the original maximum; if latency is below the
   * threshold and rpcs were throttled, increase parallelism by 5% of the
   * original maximum.
   * </ul>
   * NOTE: increases are capped by the initial maximum; decreases are floored
   * at 1% of the original maximum so that there is some level of throughput.
   *
   * @param bulkMutationRpcTargetMs the target for latency of MutateRows requests in milliseconds
   */
  public synchronized void throttle(final int bulkMutationRpcTargetMs) {
    if (isThrottling) {
      // Throttling was already turned on. No need to do it again.
      return;
    }
    LOG.info("Initializing BulkMutation throttling. "
        + "Once latency is higher than %d ms, parallelism will be reduced.",
        bulkMutationRpcTargetMs);

    // Target roughly 20% around the target latency so that there isn't too much churn.
    final long highTargetMs = (long) (bulkMutationRpcTargetMs * 1.2);
    final long lowTargetMs = (long) (bulkMutationRpcTargetMs * 0.8);

    // Increase at 5% of the maximum RPC count increments, and reduce at 10%.
    // The basic assumption is that the throttling should be aggressive, and
    // caution should be taken to not go over the latency cap: maximizing
    // throughput is less important than system stability.
    // The maximum in-flight RPC count is pretty high. Start with a
    // significantly reduced number, and then work up or down.
    setCurrentInFlightMaxRpcs(getCurrentInFlightMaxRpcs() / 4);

    Runnable r = new Runnable() {
      @Override
      public void run() {
        long meanLatencyMs = getMeanMs(stats.getMutationTimer());
        if (meanLatencyMs >= bulkMutationRpcTargetMs * 3) {
          // Latency is way above target: decrease at 30% of the maximum RPCs
          // (floored at 1% by reduceParallelism).
          reduceParallelism(meanLatencyMs, absoluteMaxInFlightRpcs * 3 / 10);
        } else if (meanLatencyMs >= highTargetMs) {
          // Latency is above the high-water mark: decrease at 10% of the maximum RPCs.
          reduceParallelism(meanLatencyMs, absoluteMaxInFlightRpcs / 10);
        } else if (getMeanMs(stats.getThrottlingTimer()) > 1) {
          if (meanLatencyMs <= lowTargetMs) {
            // Latency is low and there was throttling of at least one
            // millisecond: increase the parallelism so that new calls will not
            // be throttled. Increase more slowly than we decrease, to help the
            // system maintain stability.
            increaseParallelism(meanLatencyMs, absoluteMaxInFlightRpcs / 20);
          } else if (currentInFlightMaxRpcs < absoluteMaxInFlightRpcs / 20
              && meanLatencyMs <= (bulkMutationRpcTargetMs * 2)) {
            // When parallelism is very low, latency tends to read artificially
            // high; increase slowly to ensure the system restabilizes.
            increaseParallelism(meanLatencyMs, absoluteMaxInFlightRpcs / 50);
          }
        }
      }

      // Mean of the given timer's snapshot, converted from nanos to millis.
      private long getMeanMs(Timer timer) {
        return TimeUnit.NANOSECONDS.toMillis((long) timer.getSnapshot().getMean());
      }

      private void reduceParallelism(long meanLatencyNanos, int step) {
        // Never drop below 1% of the original maximum (at least 1).
        int minimumRpcCount = Math.max(absoluteMaxInFlightRpcs / 100, 1);
        int newValue = Math.max(currentInFlightMaxRpcs - step, minimumRpcCount);
        setParallelism(meanLatencyNanos, "Reducing", newValue);
      }

      private void increaseParallelism(long meanLatencyNanos, int incrementStep) {
        // Never exceed the original maximum.
        int newValue = Math.min(currentInFlightMaxRpcs + incrementStep, absoluteMaxInFlightRpcs);
        setParallelism(meanLatencyNanos, "Increasing", newValue);
      }

      private void setParallelism(long meanLatencyNanos, String type, int newValue) {
        int currentValue = getCurrentInFlightMaxRpcs();
        if (newValue != currentValue) {
          setCurrentInFlightMaxRpcs(newValue);
          LOG.debug("Latency is at %d ms. %s paralellelism from %d to %d.",
              TimeUnit.NANOSECONDS.toMillis(meanLatencyNanos), type, currentValue, newValue);
        }
      }
    };

    // 20 seconds is an assumption that seems to work. In bad situations, the
    // throttling will start at 50%, and then 5 cycles, or 100 seconds later.
    BigtableSessionSharedThreadPools.getInstance().getRetryExecutor()
        .scheduleAtFixedRate(r, 20, 20, TimeUnit.SECONDS);
    isThrottling = true;
  }
}
public class ExceptionUtils {

    /**
     * Prepends {@code msg} to the detail message of {@code onObject}.
     *
     * <p>This is a best-effort operation implemented via reflection on the private
     * {@code Throwable.detailMessage} field. If reflection is denied (e.g. the JPMS
     * module system on JDK 9+ throws {@code InaccessibleObjectException} from
     * {@code setAccessible}), the throwable is left unchanged and no exception
     * propagates to the caller.
     *
     * @param onObject the throwable whose message is amended; must not be null
     * @param msg      the message to insert before the existing detail message
     */
    public static void insertMessage(Throwable onObject, String msg) {
        try {
            Field field = Throwable.class.getDeclaredField("detailMessage");
            field.setAccessible(true);
            if (onObject.getMessage() != null) {
                field.set(onObject,
                        "\n[\n" + msg + "\n]\n[\nMessage: " + onObject.getMessage() + "\n]");
            } else {
                // Fixed: close the bracket on its own line ("\n]\n"), matching the
                // formatting of the branch above (previously "]\n").
                field.set(onObject, "\n[\n" + msg + "\n]\n");
            }
        } catch (Exception ignored) {
            // Best effort only. Previously RuntimeExceptions were rethrown, which made
            // setAccessible failures on modern JDKs escape a purely cosmetic helper;
            // now every reflection failure leaves the throwable untouched.
        }
    }
}
public class WrapperId { /** * Returns a ByteArray containing the serialized BeanId that corresponds
* to this WrapperId instance . < p > */
public ByteArray getBeanIdArray ( ) { } } | int beanIdLength = data . length - ivBeanIdIndex ; byte [ ] beanIdBytes = new byte [ beanIdLength ] ; System . arraycopy ( data , ivBeanIdIndex , beanIdBytes , 0 , beanIdLength ) ; return new ByteArray ( beanIdBytes ) ; |
public class ChangeObjects { /** * method to delete a MonomerNotation at a specific position of the
* PolymerNotation
* @ param position
* position of the to be deleted MonomerNotation
* @ param polymer
* PolymerNotation
* @ throws NotationException
* if the generated PolymerNotation has no elements after
* deleting the MonomerNotation */
public final static void deleteMonomerNotation ( int position , PolymerNotation polymer ) throws NotationException { } } | MonomerNotation monomerNotation = polymer . getPolymerElements ( ) . getListOfElements ( ) . get ( position ) ; if ( polymer . getPolymerElements ( ) . getListOfElements ( ) . size ( ) == 1 ) { throw new NotationException ( monomerNotation . toString ( ) + " can't be removed. Polymer has to have at least one Monomer Notation" ) ; } polymer . getPolymerElements ( ) . getListOfElements ( ) . remove ( monomerNotation ) ; |
public class F0 { /** * Applies this partial function to the given argument when it is contained in the function domain .
* Applies fallback function where this partial function is not defined , i . e . any
* { @ link java . lang . RuntimeException } is captured
* @ param fallback
* if { @ link RuntimeException } captured then apply this fallback function
* @ return the result of this function or the fallback function application */
public R applyOrElse ( F0 < ? extends R > fallback ) { } } | try { return apply ( ) ; } catch ( RuntimeException e ) { return fallback . apply ( ) ; } |
public class BigQuerySnippets { /** * [ VARIABLE " my _ dataset _ name " ] */
public Dataset createDataset ( String datasetName ) { } } | // [ START bigquery _ create _ dataset ]
Dataset dataset = null ; DatasetInfo datasetInfo = DatasetInfo . newBuilder ( datasetName ) . build ( ) ; try { // the dataset was created
dataset = bigquery . create ( datasetInfo ) ; } catch ( BigQueryException e ) { // the dataset was not created
} // [ END bigquery _ create _ dataset ]
return dataset ; |
public class SubCommandMetaSet {

    /**
     * Parses command-line arguments and sets metadata on one or all nodes.
     *
     * <p>Accepts either a single metakey=metafile pair, or the special pair form
     * &lt;cluster.xml, stores.xml&gt; (4 values). Prints a summary, optionally asks for
     * confirmation, then pushes the metadata via the AdminClient.
     *
     * @param args command-line input
     * @throws Exception on parse errors, unreadable files, or admin-operation failures
     */
    @SuppressWarnings("unchecked")
    public static void executeCommand(String[] args) throws Exception {
        OptionParser parser = getParser();
        // declare parameters
        List<String> meta = null;
        String url = null;
        List<Integer> nodeIds = null;
        Boolean allNodes = true;
        Boolean confirm = false;
        // parse command-line input; the subcommand name is re-prepended so the shared
        // parser sees it as an option head
        args = AdminToolUtils.copyArrayAddFirst(args, "--" + OPT_HEAD_META_SET);
        OptionSet options = parser.parse(args);
        if (options.has(AdminParserUtils.OPT_HELP)) {
            printHelp(System.out);
            return;
        }
        // check required options and/or conflicting options
        AdminParserUtils.checkRequired(options, OPT_HEAD_META_SET);
        AdminParserUtils.checkRequired(options, AdminParserUtils.OPT_URL);
        AdminParserUtils.checkOptional(options, AdminParserUtils.OPT_NODE, AdminParserUtils.OPT_ALL_NODES);
        // load parameters: meta is a flat list of alternating key/file entries
        meta = AdminToolUtils.getValueList((List<String>) options.valuesOf(OPT_HEAD_META_SET), "=");
        if (meta.size() != 2 && meta.size() != 4) {
            throw new VoldemortException("Invalid metakey-metafile pairs.");
        }
        url = (String) options.valueOf(AdminParserUtils.OPT_URL);
        if (options.has(AdminParserUtils.OPT_NODE)) {
            nodeIds = (List<Integer>) options.valuesOf(AdminParserUtils.OPT_NODE);
            allNodes = false;
        }
        if (options.has(AdminParserUtils.OPT_CONFIRM)) {
            confirm = true;
        }
        // print summary
        System.out.println("Set metadata");
        System.out.println("Metadata:");
        for (Integer i = 0; i < meta.size(); i += 2) {
            System.out.println(" set \'" + meta.get(i) + "\' from file \'" + meta.get(i + 1) + "\'");
        }
        System.out.println("Location:");
        System.out.println(" bootstrap url = " + url);
        if (allNodes) {
            System.out.println(" node = all nodes");
        } else {
            System.out.println(" node = " + Joiner.on(", ").join(nodeIds));
        }
        AdminClient adminClient = AdminToolUtils.getAdminClient(url);
        if (allNodes) {
            nodeIds = AdminToolUtils.getAllNodeIds(adminClient);
        }
        AdminToolUtils.assertServerNotInRebalancingState(adminClient, nodeIds);
        if (meta.size() == 2) {
            // single metakey=metafile pair
            String metaKey = meta.get(0), metaValue = meta.get(1);
            String metaFile = metaValue.replace("~", System.getProperty("user.home"));
            if (metaKey.equals(MetadataStore.STORES_KEY)) {
                if (!Utils.isReadableFile(metaFile)) {
                    throw new VoldemortException("Stores definition xml file path incorrect");
                }
                StoreDefinitionsMapper mapper = new StoreDefinitionsMapper();
                List<StoreDefinition> newStoreDefs = mapper.readStoreList(new File(metaFile));
                StoreDefinitionUtils.validateSchemasAsNeeded(newStoreDefs);
                // original metadata, fetched from an arbitrary node for the diff summary
                Integer nodeIdToGetStoreXMLFrom = nodeIds.iterator().next();
                Versioned<String> storesXML = adminClient.metadataMgmtOps.getRemoteMetadata(nodeIdToGetStoreXMLFrom, MetadataStore.STORES_KEY);
                List<StoreDefinition> oldStoreDefs = mapper.readStoreList(new StringReader(storesXML.getValue()));
                printChangeStoreSummary(oldStoreDefs, newStoreDefs, System.err);
                if (!AdminToolUtils.askConfirm(confirm, "set metadata")) {
                    return;
                }
                // execute command
                doMetaSet(adminClient, nodeIds, metaKey, mapper.writeStoreList(newStoreDefs));
                if (!allNodes) {
                    System.err.println("WARNING: Metadata version update of stores goes to all servers, "
                            + "although this set-metadata oprations only goes to node: ");
                    for (Integer nodeId : nodeIds) {
                        System.err.println(nodeId);
                    }
                }
                doMetaUpdateVersionsOnStores(adminClient, oldStoreDefs, newStoreDefs);
            } else {
                // execute command
                if (!AdminToolUtils.askConfirm(confirm, "set metadata")) {
                    return;
                }
                if (metaKey.equals(MetadataStore.CLUSTER_KEY)
                        || metaKey.equals(MetadataStore.REBALANCING_SOURCE_CLUSTER_XML)) {
                    if (!Utils.isReadableFile(metaFile)) {
                        throw new VoldemortException("Cluster xml file path incorrect");
                    }
                    ClusterMapper mapper = new ClusterMapper();
                    Cluster newCluster = mapper.readCluster(new File(metaFile));
                    doMetaSet(adminClient, nodeIds, metaKey, mapper.writeCluster(newCluster));
                } else if (metaKey.equals(MetadataStore.SLOP_STREAMING_ENABLED_KEY)
                        || metaKey.equals(MetadataStore.PARTITION_STREAMING_ENABLED_KEY)
                        || metaKey.equals(MetadataStore.READONLY_FETCH_ENABLED_KEY)
                        || metaKey.equals(MetadataStore.QUOTA_ENFORCEMENT_ENABLED_KEY)) {
                    // boolean toggles are pushed verbatim
                    doMetaSet(adminClient, nodeIds, metaKey, metaValue);
                } else if (metaKey.equals(KEY_OFFLINE)) {
                    boolean setOffline = Boolean.parseBoolean(metaValue);
                    if (setOffline && nodeIds.size() > 1) {
                        throw new VoldemortException("Setting more than one node to offline is not allowed.");
                    }
                    for (Integer nodeId : nodeIds) {
                        adminClient.metadataMgmtOps.setRemoteOfflineState(nodeId, setOffline);
                    }
                } else if (metaKey.equals(MetadataStore.REBALANCING_STEAL_INFO)) {
                    if (!Utils.isReadableFile(metaFile)) {
                        throw new VoldemortException("Rebalancing steal info file path incorrect");
                    }
                    String rebalancingStealInfoJsonString = FileUtils.readFileToString(new File(metaFile));
                    RebalancerState state = RebalancerState.create(rebalancingStealInfoJsonString);
                    doMetaSet(adminClient, nodeIds, metaKey, state.toJsonString());
                } else {
                    throw new VoldemortException("Incorrect metadata key");
                }
            }
        } else if (meta.size() == 4) {
            // set metadata pair cluster.xml, stores.xml — accepted in either order
            String clusterFile, storesFile;
            if (meta.get(0).equals(MetadataStore.CLUSTER_KEY) && meta.get(2).equals(MetadataStore.STORES_KEY)) {
                clusterFile = meta.get(1);
                storesFile = meta.get(3);
            } else if (meta.get(0).equals(MetadataStore.STORES_KEY) && meta.get(2).equals(MetadataStore.CLUSTER_KEY)) {
                storesFile = meta.get(1);
                clusterFile = meta.get(3);
            } else {
                throw new VoldemortException("meta set-pair keys must be <cluster.xml, stores.xml>");
            }
            clusterFile = clusterFile.replace("~", System.getProperty("user.home"));
            storesFile = storesFile.replace("~", System.getProperty("user.home"));
            ClusterMapper clusterMapper = new ClusterMapper();
            StoreDefinitionsMapper storeDefsMapper = new StoreDefinitionsMapper();
            // original metadata, fetched from an arbitrary node for the diff summary
            Integer nodeIdToGetStoreXMLFrom = nodeIds.iterator().next();
            Versioned<String> storesXML = adminClient.metadataMgmtOps.getRemoteMetadata(nodeIdToGetStoreXMLFrom, MetadataStore.STORES_KEY);
            List<StoreDefinition> oldStoreDefs = storeDefsMapper.readStoreList(new StringReader(storesXML.getValue()));
            if (!Utils.isReadableFile(clusterFile)) {
                throw new VoldemortException("Cluster xml file path incorrect");
            }
            Cluster cluster = clusterMapper.readCluster(new File(clusterFile));
            if (!Utils.isReadableFile(storesFile)) {
                throw new VoldemortException("Stores definition xml file path incorrect");
            }
            List<StoreDefinition> newStoreDefs = storeDefsMapper.readStoreList(new File(storesFile));
            StoreDefinitionUtils.validateSchemasAsNeeded(newStoreDefs);
            printChangeStoreSummary(oldStoreDefs, newStoreDefs, System.err);
            if (!AdminToolUtils.askConfirm(confirm, "set metadata")) {
                return;
            }
            // execute command
            doMetaSetPair(adminClient, nodeIds, clusterMapper.writeCluster(cluster), storeDefsMapper.writeStoreList(newStoreDefs));
            if (!allNodes) {
                System.err.println("WARNING: Metadata version update of stores goes to all servers, "
                        + "although this set-metadata oprations only goes to node: ");
                for (Integer nodeId : nodeIds) {
                    System.err.println(nodeId);
                }
            }
            doMetaUpdateVersionsOnStores(adminClient, oldStoreDefs, newStoreDefs);
        }
    }
}
public class AdminToolLog4j2Util { /** * returns the log messages from custom appenders output stream
* @ param appenderName
* @ param encoding
* @ return
* @ throws UnsupportedEncodingException
* @ since 1.1.1 */
public String getStringOutput ( String appenderName , String encoding ) throws UnsupportedEncodingException { } } | AdminToolLog4j2OutputStream baos = outputStreams . get ( appenderName ) ; String output = "" ; if ( null != baos ) { output = baos . getAndReset ( encoding ) ; } return output . trim ( ) . isEmpty ( ) ? null : output ; |
public class ST_RemoveRepeatedPoints { /** * Removes duplicated coordinates within a LineString .
* @ param linestring
* @ param tolerance to delete the coordinates
* @ return
* @ throws java . sql . SQLException */
public static LineString removeDuplicateCoordinates ( LineString linestring , double tolerance ) throws SQLException { } } | Coordinate [ ] coords = CoordinateUtils . removeRepeatedCoordinates ( linestring . getCoordinates ( ) , tolerance , false ) ; if ( coords . length < 2 ) { throw new SQLException ( "Not enough coordinates to build a new LineString.\n Please adjust the tolerance" ) ; } return FACTORY . createLineString ( coords ) ; |
public class JmxClient {

    /**
     * Returns the value of a JMX attribute as a String.
     *
     * <p>Convenience overload: builds the ObjectName from {@code domain} and
     * {@code beanName} and delegates to the ObjectName-based variant.
     *
     * @param domain        JMX domain of the MBean
     * @param beanName      name of the MBean within the domain
     * @param attributeName attribute to read
     * @return the attribute value rendered as a String
     * @throws Exception if the MBean lookup or attribute read fails
     */
    public String getAttributeString(String domain, String beanName, String attributeName) throws Exception {
        return getAttributeString(ObjectNameUtil.makeObjectName(domain, beanName), attributeName);
    }
}
public class HttpEncodingTools { /** * Encodes characters in the string except for those allowed in an absolute path .
* @ deprecated Prefer to use { @ link HttpEncodingTools # encode ( String ) } instead for encoding specific
* pieces of the URI . This method does not escape certain reserved characters , like ' / ' and ' : ' .
* As such , this is not safe to use on paths that may contain these reserved characters in the wrong places . */
@ Deprecated public static String encodePath ( String path ) { } } | try { return URIUtil . encodePath ( path , "UTF-8" ) ; } catch ( URIException ex ) { throw new EsHadoopIllegalArgumentException ( "Cannot encode path segment [" + path + "]" , ex ) ; } |
public class JbcSrcRuntime { /** * Helper function to translate NullData - > null when resolving a SoyValueProvider . */
public static SoyValue resolveSoyValueProvider ( SoyValueProvider provider ) { } } | SoyValue value = provider . resolve ( ) ; return handleTofuNull ( value ) ; |
public class KAMStoreImpl { /** * { @ inheritDoc } */
@ Override public List < AnnotationType > getAnnotationTypes ( KamInfo ki ) { } } | if ( ki == null ) throw new InvalidArgument ( DEFAULT_MSG ) ; if ( ! exists ( ki ) ) return null ; try { return kamStoreDao ( ki ) . getAnnotationTypes ( ) ; } catch ( SQLException e ) { final String fmt = "error getting annotation types for %s" ; final String msg = format ( fmt , ki . getName ( ) ) ; throw new KAMStoreException ( msg , e ) ; } |
public class CommerceShipmentLocalServiceBaseImpl { /** * Returns the number of rows matching the dynamic query .
* @ param dynamicQuery the dynamic query
* @ param projection the projection to apply to the query
* @ return the number of rows matching the dynamic query */
@ Override public long dynamicQueryCount ( DynamicQuery dynamicQuery , Projection projection ) { } } | return commerceShipmentPersistence . countWithDynamicQuery ( dynamicQuery , projection ) ; |
public class LogEntry { /** * Sets the event type of the log entry ( { @ link # eventType } ) .
* @ param eventType Event type to set .
* @ throws LockingException if the field EVENTTYPE is locked and the
* given event type differs from { @ link # eventType } .
* @ return < code > true < / code > if { @ link # eventType } was modified ; < br >
* < code > false < / code > otherwise . */
public boolean setEventType ( EventType eventType ) throws LockingException { } } | Validate . notNull ( eventType ) ; if ( isFieldLocked ( EntryField . EVENTTYPE ) ) { if ( ! this . eventType . equals ( eventType ) ) { throw new LockingException ( EntryField . EVENTTYPE ) ; } return false ; } else { this . eventType = eventType ; return true ; } |
public class VirtualMachineScaleSetsInner {

    /**
     * Create or update a VM scale set (async, callback-based variant).
     *
     * <p>Delegates to the ServiceResponse-based async implementation and adapts it to
     * a {@link ServiceFuture} wired to the supplied callback.
     *
     * @param resourceGroupName The name of the resource group.
     * @param vmScaleSetName The name of the VM scale set to create or update.
     * @param parameters The scale set object.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<VirtualMachineScaleSetInner> createOrUpdateAsync(String resourceGroupName, String vmScaleSetName, VirtualMachineScaleSetInner parameters, final ServiceCallback<VirtualMachineScaleSetInner> serviceCallback) {
        return ServiceFuture.fromResponse(createOrUpdateWithServiceResponseAsync(resourceGroupName, vmScaleSetName, parameters), serviceCallback);
    }
}
public class POJOCreator { /** * This function is called for each incoming byte record .
* @ param decoder The decoder that encapsulates the byte [ ] data record
* @ return Map of method and the input parameter objects
* @ throws java . io . IOException if the { @ link co . cask . tigon . internal . io . ReflectionDatumReader } cannot decode incoming
* data record */
public Object decode ( Decoder decoder ) throws IOException { } } | try { return outputGenerator . read ( decoder , schema ) ; } catch ( IOException e ) { LOG . error ( "Cannot instantiate object of type {}" , outputClass . getName ( ) , e ) ; throw e ; } |
public class TimestampParseExprMacro {

    /**
     * Default formatter that parses according to the docs for this method: "If the
     * pattern is not provided, this parses time strings in either ISO8601 or SQL
     * format."
     *
     * <p>Built as: a required ISO date element, optionally followed by a time part
     * introduced by either 'T' (ISO8601) or ' ' (SQL style), itself optionally
     * followed by a timezone offset.
     */
    private static DateTimes.UtcFormatter createDefaultParser(final DateTimeZone timeZone) {
        // Offset such as "+01", "+0100", or "Z" (printed for zero offset).
        final DateTimeFormatter offsetElement = new DateTimeFormatterBuilder()
                .appendTimeZoneOffset("Z", true, 2, 4)
                .toFormatter();
        // Time part: a 'T' or ' ' separator, then an optional ISO time element,
        // then an optional offset. The two-element array makes the separator an
        // either/or choice.
        DateTimeParser timeOrOffset = new DateTimeFormatterBuilder()
                .append(
                        null,
                        new DateTimeParser[]{
                                new DateTimeFormatterBuilder().appendLiteral('T').toParser(),
                                new DateTimeFormatterBuilder().appendLiteral(' ').toParser()
                        }
                )
                .appendOptional(ISODateTimeFormat.timeElementParser().getParser())
                .appendOptional(offsetElement.getParser())
                .toParser();
        // Date element is mandatory; the time/offset tail is optional. The supplied
        // zone applies when the input carries no explicit offset.
        return DateTimes.wrapFormatter(
                new DateTimeFormatterBuilder()
                        .append(ISODateTimeFormat.dateElementParser())
                        .appendOptional(timeOrOffset)
                        .toFormatter()
                        .withZone(timeZone)
        );
    }
}
public class CollectionUtils {

    /**
     * Returns a stream containing the members of the given collection.
     *
     * <p>A {@code null} collection yields an empty stream rather than throwing.
     */
    public static <T, C extends Collection<T>> Stream<T> membersOf(C collection) {
        if (collection == null) {
            return Stream.empty();
        }
        return collection.stream();
    }
}
public class GenericIndexedWriter { /** * Tries to get best value split ( number of elements in each value file ) which can be expressed as power of 2.
* @ return Returns the size of value file splits as power of 2.
* @ throws IOException */
private int bagSizePower ( ) throws IOException { } } | long avgObjectSize = ( valuesOut . size ( ) + numWritten - 1 ) / numWritten ; for ( int i = 31 ; i >= 0 ; -- i ) { if ( ( 1L << i ) * avgObjectSize <= fileSizeLimit ) { if ( actuallyFits ( i ) ) { return i ; } } } throw new ISE ( "no value split found with fileSizeLimit [%d], avgObjectSize [%d]" , fileSizeLimit , avgObjectSize ) ; |
public class AssetsApi { /** * Get corporation asset names Return names for a set of item ids , which you
* can get from corporation assets endpoint . Only valid for items that can
* customize names , like containers or ships - - - Requires one of the
* following EVE corporation role ( s ) : Director SSO Scope :
* esi - assets . read _ corporation _ assets . v1
* @ param corporationId
* An EVE corporation ID ( required )
* @ param requestBody
* A list of item ids ( required )
* @ param datasource
* The server name you would like data from ( optional , default to
* tranquility )
* @ param token
* Access token to use if unable to set a header ( optional )
* @ return List & lt ; CorporationAssetsNamesResponse & gt ;
* @ throws ApiException
* If fail to call the API , e . g . server error or cannot
* deserialize the response body */
public List < CorporationAssetsNamesResponse > postCorporationsCorporationIdAssetsNames ( Integer corporationId , List < Long > requestBody , String datasource , String token ) throws ApiException { } } | ApiResponse < List < CorporationAssetsNamesResponse > > resp = postCorporationsCorporationIdAssetsNamesWithHttpInfo ( corporationId , requestBody , datasource , token ) ; return resp . getData ( ) ; |
public class TerminateJobRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( TerminateJobRequest terminateJobRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( terminateJobRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( terminateJobRequest . getJobId ( ) , JOBID_BINDING ) ; protocolMarshaller . marshall ( terminateJobRequest . getReason ( ) , REASON_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class OutputMarshaller { /** * Marshall the given parameter object . */
public void marshall ( Output output , ProtocolMarshaller protocolMarshaller ) { } } | if ( output == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( output . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( output . getKinesisStreamsOutput ( ) , KINESISSTREAMSOUTPUT_BINDING ) ; protocolMarshaller . marshall ( output . getKinesisFirehoseOutput ( ) , KINESISFIREHOSEOUTPUT_BINDING ) ; protocolMarshaller . marshall ( output . getLambdaOutput ( ) , LAMBDAOUTPUT_BINDING ) ; protocolMarshaller . marshall ( output . getDestinationSchema ( ) , DESTINATIONSCHEMA_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class CheckMissingAndExtraRequires { /** * If this returns true , check for @ extends and @ implements annotations on this node . Otherwise ,
* it ' s probably an alias for an existing class , so skip those annotations .
* @ return Whether the given node declares a function . True for the following forms :
* < li >
* < pre > function foo ( ) { } < / pre >
* < li >
* < pre > var foo = function ( ) { } ; < / pre >
* < li >
* < pre > foo . bar = function ( ) { } ; < / pre > */
private boolean declaresFunctionOrClass ( Node n ) { } } | if ( n . isFunction ( ) || n . isClass ( ) ) { return true ; } if ( n . isAssign ( ) && ( n . getLastChild ( ) . isFunction ( ) || n . getLastChild ( ) . isClass ( ) ) ) { return true ; } if ( NodeUtil . isNameDeclaration ( n ) && n . getFirstChild ( ) . hasChildren ( ) && ( n . getFirstFirstChild ( ) . isFunction ( ) || n . getFirstFirstChild ( ) . isClass ( ) ) ) { return true ; } return false ; |
public class DefaultInterval { /** * ( non - Javadoc )
* @ see org . virginia . pbhs . parameters . Interval # getMinimumStart ( ) */
@ Override public Long getMinimumStart ( ) { } } | calculator ( ) ; if ( simple ) { return v [ 0 ] ; } else { Weight minStart = cn . getMinimumStart ( ) ; return minStart . isInfinity ( ) ? null : minStart . value ( ) ; } |
public class SessionCommandException { /** * Converts a Throwable to a SessionCommandException . If the Throwable is a
* SessionCommandException , it will be passed through unmodified ; otherwise , it will be wrapped
* in a new SessionCommandException .
* @ param cause the Throwable to convert
* @ return a SessionCommandException */
public static SessionCommandException fromThrowable ( Throwable cause ) { } } | return ( cause instanceof SessionCommandException ) ? ( SessionCommandException ) cause : new SessionCommandException ( cause ) ; |
public class NativeArray {

    /**
     * See Ecma 262v3 15.4.4.4 — Array.prototype.concat.
     *
     * <p>First attempts a fast path when this array and all array arguments are
     * dense-only NativeArrays: the backing arrays are bulk-copied with
     * System.arraycopy. Otherwise falls back to the generic element-by-element
     * copy using the length property.
     */
    private static Scriptable js_concat(Context cx, Scriptable scope, Scriptable thisObj, Object[] args) {
        // create an empty Array to return.
        scope = getTopLevelScope(scope);
        Scriptable result = cx.newArray(scope, 0);
        if (thisObj instanceof NativeArray && result instanceof NativeArray) {
            NativeArray denseThis = (NativeArray) thisObj;
            NativeArray denseResult = (NativeArray) result;
            if (denseThis.denseOnly && denseResult.denseOnly) {
                // First calculate length of resulting array
                boolean canUseDense = true;
                int length = (int) denseThis.length;
                for (int i = 0; i < args.length && canUseDense; i++) {
                    if (args[i] instanceof NativeArray) {
                        // only try to use dense approach for Array-like
                        // objects that are actually NativeArrays
                        final NativeArray arg = (NativeArray) args[i];
                        canUseDense = arg.denseOnly;
                        length += arg.length;
                    } else {
                        // a non-array argument contributes a single element
                        length++;
                    }
                }
                if (canUseDense && denseResult.ensureCapacity(length)) {
                    // Bulk-copy this array, then each argument (or single value) in order.
                    System.arraycopy(denseThis.dense, 0, denseResult.dense, 0, (int) denseThis.length);
                    int cursor = (int) denseThis.length;
                    for (int i = 0; i < args.length && canUseDense; i++) {
                        if (args[i] instanceof NativeArray) {
                            NativeArray arg = (NativeArray) args[i];
                            System.arraycopy(arg.dense, 0, denseResult.dense, cursor, (int) arg.length);
                            cursor += (int) arg.length;
                        } else {
                            denseResult.dense[cursor++] = args[i];
                        }
                    }
                    denseResult.length = length;
                    return result;
                }
                // fall through to the generic path when capacity could not be ensured
            }
        }
        long length;
        long slot = 0;
        /* Put the target in the result array; only add it as an array
         * if it looks like one. */
        if (js_isArray(thisObj)) {
            length = getLengthProperty(cx, thisObj, false);
            // Copy from the target object into the result
            for (slot = 0; slot < length; slot++) {
                Object temp = getRawElem(thisObj, slot);
                if (temp != NOT_FOUND) {
                    // holes (NOT_FOUND) are skipped, preserving sparseness
                    defineElem(cx, result, slot, temp);
                }
            }
        } else {
            defineElem(cx, result, slot++, thisObj);
        }
        /* Copy from the arguments into the result. If any argument
         * has a numeric length property, treat it as an array and add
         * elements separately; otherwise, just copy the argument. */
        for (int i = 0; i < args.length; i++) {
            if (js_isArray(args[i])) { // js_isArray => instanceof Scriptable
                Scriptable arg = (Scriptable) args[i];
                length = getLengthProperty(cx, arg, false);
                for (long j = 0; j < length; j++, slot++) {
                    Object temp = getRawElem(arg, j);
                    if (temp != NOT_FOUND) {
                        defineElem(cx, result, slot, temp);
                    }
                }
            } else {
                defineElem(cx, result, slot++, args[i]);
            }
        }
        // final slot count becomes the result's length
        setLengthProperty(cx, result, slot);
        return result;
    }
}
public class ExternalSpreadsheetCompiler { /** * Generates DRL from the input stream containing the spreadsheet .
* @ param xlsStream
* The stream to the spreadsheet . Uses the first worksheet found
* for the decision tables , ignores others .
* @ param type
* The type of the file - InputType . CSV or InputType . XLS
* @ param listener
* @ return DRL xml , ready for use in drools .
* @ throws IOException */
public String compile ( final InputStream xlsStream , final InputType type , final TemplateDataListener listener ) { } } | ArrayList < DataListener > listeners = new ArrayList < DataListener > ( ) ; listeners . add ( listener ) ; compile ( xlsStream , type , listeners ) ; return listener . renderDRL ( ) ; |
public class Objects {

    /**
     * Checks deep equality of two objects.
     *
     * <p>Identical references (including two nulls) are deeply equal; a single null
     * is never equal to a non-null object; otherwise the comparison is delegated to
     * {@link java.util.Arrays#deepEquals(Object[], Object[])} via single-element
     * wrapper arrays, so nested arrays compare by content.
     *
     * @param a an object, may be null
     * @param b an object, may be null
     * @return {@code true} if the objects are deeply equal, {@code false} otherwise
     * @since 1.2.0
     */
    public static boolean deepEquals(Object a, Object b) {
        if (a == b) {
            return true;
        }
        if (a == null || b == null) {
            return false;
        }
        return Arrays.deepEquals(new Object[] { a }, new Object[] { b });
    }
}
public class BccClient { /** * Releasing the specified volume owned by the user .
* You can release the specified volume only
* when the volume is Available / Expired / Error ,
* otherwise , it ' s will get < code > 409 < / code > errorCode .
* @ param request The request containing all options for releasing the specified volume . */
public void releaseVolume ( ReleaseVolumeRequest request ) { } } | checkNotNull ( request , "request should not be null." ) ; checkStringNotEmpty ( request . getVolumeId ( ) , "request volumeId should not be empty." ) ; InternalRequest internalRequest = this . createRequest ( request , HttpMethodName . DELETE , VOLUME_PREFIX , request . getVolumeId ( ) ) ; invokeHttpClient ( internalRequest , AbstractBceResponse . class ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.