signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class HttpChain { /** * ChainEventListener method .
* This method can not be synchronized ( deadlock with update / stop ) .
* Rely on CFW synchronization of chain operations . */
@ Override public void chainStopped ( ChainData chainData ) { } } | final ActiveConfiguration cfg = currentConfig ; int oldState = chainState . getAndSet ( ChainState . STOPPED . val ) ; if ( oldState > ChainState . QUIESCED . val ) { quiesceChain ( ) ; } // Wake up anything waiting for the chain to stop
// ( see the update method for one example )
stopWait . notifyStopped ( ) ; // Post an endpoint stopped event to anyone listening
String topic = owner . getEventTopic ( ) + HttpServiceConstants . ENDPOINT_STOPPED ; postEvent ( topic , cfg , null ) ; cfg . clearActivePort ( ) ; |
public class XmlParser { /** * Parse XML from InputStream . */
public synchronized Node parse ( InputStream in ) throws IOException , SAXException { } } | // _ dtd = null ;
Handler handler = new Handler ( ) ; XMLReader reader = _parser . getXMLReader ( ) ; reader . setContentHandler ( handler ) ; reader . setErrorHandler ( handler ) ; reader . setEntityResolver ( handler ) ; _parser . parse ( new InputSource ( in ) , handler ) ; if ( handler . error != null ) throw handler . error ; Node root = ( Node ) handler . root ; handler . reset ( ) ; return root ; |
public class CommerceAccountOrganizationRelPersistenceImpl { /** * Returns an ordered range of all the commerce account organization rels where commerceAccountId = & # 63 ; .
* Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CommerceAccountOrganizationRelModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order .
* @ param commerceAccountId the commerce account ID
* @ param start the lower bound of the range of commerce account organization rels
* @ param end the upper bound of the range of commerce account organization rels ( not inclusive )
* @ param orderByComparator the comparator to order the results by ( optionally < code > null < / code > )
* @ return the ordered range of matching commerce account organization rels */
@ Override public List < CommerceAccountOrganizationRel > findByCommerceAccountId ( long commerceAccountId , int start , int end , OrderByComparator < CommerceAccountOrganizationRel > orderByComparator ) { } } | return findByCommerceAccountId ( commerceAccountId , start , end , orderByComparator , true ) ; |
public class Follower { /** * Sends CEPOCH message to its prospective leader .
* @ throws IOException in case of IO failure . */
void sendProposedEpoch ( ) throws IOException { } } | Message message = MessageBuilder . buildProposedEpoch ( persistence . getProposedEpoch ( ) , persistence . getAckEpoch ( ) , persistence . getLastSeenConfig ( ) , getSyncTimeoutMs ( ) ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Sends {} to leader {}" , TextFormat . shortDebugString ( message ) , this . electedLeader ) ; } sendMessage ( this . electedLeader , message ) ; |
public class Attributes { /** * Get an attribute ' s value by case - insensitive key
* @ param key the attribute name
* @ return the first matching attribute value if set ; or empty string if not set ( ora boolean attribute ) . */
public String getIgnoreCase ( String key ) { } } | int i = indexOfKeyIgnoreCase ( key ) ; return i == NotFound ? EmptyString : checkNotNull ( vals [ i ] ) ; |
public class PrefsConfig { /** * Sets the value of the specified preference , overriding the value defined in the
* configuration files shipped with the application . */
public void setValue ( String name , String value ) { } } | String oldValue = getValue ( name , ( String ) null ) ; _prefs . put ( name , value ) ; _propsup . firePropertyChange ( name , oldValue , value ) ; |
public class VirtualNetworkGatewaysInner { /** * The Get VpnclientIpsecParameters operation retrieves information about the vpnclient ipsec policy for P2S client of virtual network gateway in the specified resource group through Network resource provider .
* @ param resourceGroupName The name of the resource group .
* @ param virtualNetworkGatewayName The virtual network gateway name .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < VpnClientIPsecParametersInner > getVpnclientIpsecParametersAsync ( String resourceGroupName , String virtualNetworkGatewayName , final ServiceCallback < VpnClientIPsecParametersInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( getVpnclientIpsecParametersWithServiceResponseAsync ( resourceGroupName , virtualNetworkGatewayName ) , serviceCallback ) ; |
public class CPDefinitionSpecificationOptionValuePersistenceImpl { /** * Returns the first cp definition specification option value in the ordered set where CPDefinitionId = & # 63 ; and CPSpecificationOptionId = & # 63 ; .
* @ param CPDefinitionId the cp definition ID
* @ param CPSpecificationOptionId the cp specification option ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching cp definition specification option value
* @ throws NoSuchCPDefinitionSpecificationOptionValueException if a matching cp definition specification option value could not be found */
@ Override public CPDefinitionSpecificationOptionValue findByC_CSO_First ( long CPDefinitionId , long CPSpecificationOptionId , OrderByComparator < CPDefinitionSpecificationOptionValue > orderByComparator ) throws NoSuchCPDefinitionSpecificationOptionValueException { } } | CPDefinitionSpecificationOptionValue cpDefinitionSpecificationOptionValue = fetchByC_CSO_First ( CPDefinitionId , CPSpecificationOptionId , orderByComparator ) ; if ( cpDefinitionSpecificationOptionValue != null ) { return cpDefinitionSpecificationOptionValue ; } StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "CPDefinitionId=" ) ; msg . append ( CPDefinitionId ) ; msg . append ( ", CPSpecificationOptionId=" ) ; msg . append ( CPSpecificationOptionId ) ; msg . append ( "}" ) ; throw new NoSuchCPDefinitionSpecificationOptionValueException ( msg . toString ( ) ) ; |
public class PhoneNumberUtil { /** * parse phone number .
* @ param pphoneNumber phone number as string
* @ param pcountryCode iso code of country
* @ param plocale locale to read properties in the correct language
* @ return PhoneNumberData */
public PhoneNumberData parsePhoneNumber ( final String pphoneNumber , final String pcountryCode , final Locale plocale ) { } } | return ( PhoneNumberData ) this . parsePhoneNumber ( pphoneNumber , new PhoneNumberData ( ) , CreatePhoneCountryConstantsClass . create ( plocale ) . countryMap ( ) . get ( StringUtils . defaultString ( pcountryCode ) ) ) ; |
public class InjectionTargetField { /** * F743-29174 */
@ Override protected void injectMember ( Object objectToInject , Object dependentObject ) throws Exception { } } | ivField . set ( objectToInject , dependentObject ) ; |
public class FindingReplacing { /** * Sets the substring in given left index and right index as the delegate string
* @ param leftIndex
* @ param rightIndex
* @ return */
public S setBetn ( int leftIndex , int rightIndex ) { } } | return set ( Indexer . of ( delegate . get ( ) ) . between ( leftIndex , rightIndex ) ) ; |
public class CommerceWarehouseItemUtil { /** * Returns the commerce warehouse items before and after the current commerce warehouse item in the ordered set where CProductId = & # 63 ; and CPInstanceUuid = & # 63 ; .
* @ param commerceWarehouseItemId the primary key of the current commerce warehouse item
* @ param CProductId the c product ID
* @ param CPInstanceUuid the cp instance uuid
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the previous , current , and next commerce warehouse item
* @ throws NoSuchWarehouseItemException if a commerce warehouse item with the primary key could not be found */
public static CommerceWarehouseItem [ ] findByCPI_CPIU_PrevAndNext ( long commerceWarehouseItemId , long CProductId , String CPInstanceUuid , OrderByComparator < CommerceWarehouseItem > orderByComparator ) throws com . liferay . commerce . exception . NoSuchWarehouseItemException { } } | return getPersistence ( ) . findByCPI_CPIU_PrevAndNext ( commerceWarehouseItemId , CProductId , CPInstanceUuid , orderByComparator ) ; |
public class Slf4JLogger { /** * Log an error to the SLF4J Logger with < code > TRACE < / code > priority . */
public void trace ( Object message , Throwable t ) { } } | getLogger ( ) . trace ( message . toString ( ) , t ) ; |
public class TextMateGenerator2 { /** * Generate the rules for the comments .
* @ return the rules . */
protected List < Map < String , ? > > generateComments ( ) { } } | final List < Map < String , ? > > list = new ArrayList < > ( ) ; // Block comment
list . add ( pattern ( it -> { it . delimiters ( "(/\\*+)" , "(\\*/)" ) ; // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $
it . style ( BLOCK_COMMENT_STYLE ) ; it . beginStyle ( BLOCK_COMMENT_DELIMITER_STYLE ) ; it . endStyle ( BLOCK_COMMENT_DELIMITER_STYLE ) ; it . pattern ( it2 -> { it2 . matches ( "^\\s*(\\*)(?!/)" ) ; // $ NON - NLS - 1 $
it2 . style ( BLOCK_COMMENT_DELIMITER_STYLE ) ; } ) ; it . comment ( "Multiline comments" ) ; // $ NON - NLS - 1 $
} ) ) ; // Line comment
list . add ( pattern ( it -> { it . matches ( "\\s*(//)(.*)$" ) ; // $ NON - NLS - 1 $
it . substyle ( 1 , LINE_COMMENT_DELIMITER_STYLE ) ; it . substyle ( 2 , LINE_COMMENT_STYLE ) ; it . comment ( "Single-line comment" ) ; // $ NON - NLS - 1 $
} ) ) ; return list ; |
public class DiscountCurveInterpolation { /** * Create a discount curve from given times and given zero rates using default interpolation and extrapolation methods .
* The discount factor is determined by
* < code >
* givenDiscountFactors [ timeIndex ] = Math . exp ( - givenZeroRates [ timeIndex ] * times [ timeIndex ] ) ;
* < / code >
* @ param name The name of this discount curve .
* @ param times Array of times as doubles .
* @ param givenZeroRates Array of corresponding zero rates .
* @ return A new discount factor object . */
public static DiscountCurveInterpolation createDiscountCurveFromZeroRates ( String name , double [ ] times , RandomVariable [ ] givenZeroRates ) { } } | RandomVariable [ ] givenDiscountFactors = new RandomVariable [ givenZeroRates . length ] ; for ( int timeIndex = 0 ; timeIndex < times . length ; timeIndex ++ ) { givenDiscountFactors [ timeIndex ] = givenZeroRates [ timeIndex ] . mult ( - times [ timeIndex ] ) . exp ( ) ; } return createDiscountCurveFromDiscountFactors ( name , times , givenDiscountFactors ) ; |
public class ChainGroupDataImpl { /** * Method removeChainEventListener . Removes a listener from the list
* of those being informed of chain events on this chain . The listener is also
* removed from each chain in the group unless the chain is in another group
* which is associated with the listener .
* @ param listener */
public final void removeChainEventListener ( ChainEventListener listener ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "removeChainEventListener: " + listener ) ; } if ( null != listener ) { // Remove the listener from the list monitored by this group .
if ( ! getChainEventListeners ( ) . remove ( listener ) ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Listener " + listener + " was not found in list monitored by group " + getName ( ) ) ; } } // Remove the listener from each of the chains in this group .
for ( ChainData chain : getChains ( ) ) { removeListenerFromChain ( listener , chain ) ; } } |
public class fpGenericConstrainedInterface { /** * this method is really suspect , but will ignore this case */
@ Override public int compare ( GregorianCalendar c1 , GregorianCalendar c2 ) { } } | if ( c2 . getGregorianChange ( ) == null ) { return ( int ) ( c1 . getTime ( ) . getTime ( ) - c2 . getTime ( ) . getTime ( ) ) ; } else { return 0 ; } |
public class CmsSearchReplaceSettings { /** * Returns < code > true < / code > if Solr index is selected and a query was entered . < p >
* @ return < code > true < / code > if Solr index is selected and a query was entered */
public boolean isSolrSearch ( ) { } } | if ( VFS . equals ( m_source ) ) { // VFS search selected
return false ; } // index selected and query entered - - > Solr search else VFS
return ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( m_source ) && CmsStringUtil . isNotEmptyOrWhitespaceOnly ( m_query ) ) ; |
public class AmazonEC2Client { /** * Creates a data feed for Spot Instances , enabling you to view Spot Instance usage logs . You can create one data
* feed per AWS account . For more information , see < a
* href = " https : / / docs . aws . amazon . com / AWSEC2 / latest / UserGuide / spot - data - feeds . html " > Spot Instance Data Feed < / a > in
* the < i > Amazon EC2 User Guide for Linux Instances < / i > .
* @ param createSpotDatafeedSubscriptionRequest
* Contains the parameters for CreateSpotDatafeedSubscription .
* @ return Result of the CreateSpotDatafeedSubscription operation returned by the service .
* @ sample AmazonEC2 . CreateSpotDatafeedSubscription
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / CreateSpotDatafeedSubscription "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public CreateSpotDatafeedSubscriptionResult createSpotDatafeedSubscription ( CreateSpotDatafeedSubscriptionRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeCreateSpotDatafeedSubscription ( request ) ; |
public class HttpHandler { /** * Sets attributes of the request that are common to all requests for this handler . */
@ Override public void initialize ( HttpRequest httpRequest ) throws IOException { } } | // Do not throw if execute fails , since Axis will handle unmarshalling the
// fault .
httpRequest . setThrowExceptionOnExecuteError ( false ) ; // For consistency with the default Axis HTTPSender and CommonsHTTPSender , do not
// follow redirects .
httpRequest . setFollowRedirects ( false ) ; // Retry should be handled by the client .
httpRequest . setNumberOfRetries ( 0 ) ; |
public class ConnectivityStateManager { /** * Connectivity state is changed to the specified value . Will trigger some notifications that have
* been registered earlier by { @ link ManagedChannel # notifyWhenStateChanged } . */
void gotoState ( @ Nonnull ConnectivityState newState ) { } } | checkNotNull ( newState , "newState" ) ; if ( state != newState && state != ConnectivityState . SHUTDOWN ) { state = newState ; if ( listeners . isEmpty ( ) ) { return ; } // Swap out callback list before calling them , because a callback may register new callbacks ,
// if run in direct executor , can cause ConcurrentModificationException .
ArrayList < Listener > savedListeners = listeners ; listeners = new ArrayList < > ( ) ; for ( Listener listener : savedListeners ) { listener . runInExecutor ( ) ; } } |
public class BosClient { /** * Uploads the specified string to Bos under the specified bucket and key name .
* @ param bucketName The name of an existing bucket , to which you have Write permission .
* @ param key The key under which to store the specified file .
* @ param value The string containing the value to be uploaded to Bos .
* @ return A PutObjectResponse object containing the information returned by Bos for the newly created object . */
public PutObjectResponse putObject ( String bucketName , String key , String value ) { } } | try { return this . putObject ( bucketName , key , value . getBytes ( DEFAULT_ENCODING ) , new ObjectMetadata ( ) ) ; } catch ( UnsupportedEncodingException e ) { throw new BceClientException ( "Fail to get bytes." , e ) ; } |
public class Validator { /** * Where to find the schema . */
public void setSchemaSources ( Source ... s ) { } } | if ( s != null ) { sourceLocations = Arrays . copyOf ( s , s . length ) ; } else { sourceLocations = null ; } |
public class MviConductorLifecycleListener { /** * Determines whether or not a Presenter Instance should be kept
* @ param keepPresenterInstance true , if the delegate has enabled keep */
static boolean retainPresenterInstance ( boolean keepPresenterInstance , Controller controller ) { } } | return keepPresenterInstance && ( controller . getActivity ( ) . isChangingConfigurations ( ) || ! controller . getActivity ( ) . isFinishing ( ) ) && ! controller . isBeingDestroyed ( ) ; |
public class Connection { public String getFullTangoHost ( ) throws DevFailed { } } | // Get the tango _ host
String tangoHost = get_tango_host ( ) ; String host = tangoHost . substring ( 0 , tangoHost . indexOf ( ':' ) ) ; String port = tangoHost . substring ( tangoHost . indexOf ( ':' ) ) ; host = TangoUrl . getCanonicalName ( host ) ; return host + port ; |
public class J2CSecurityHelper { /** * This method validates whether the user security name provided by the CallerPrincipalCallback and
* the PasswordValidationCallback match . It does this check only in case the CallerPrincipalCallback
* was invoked prior to the current invocation of the PasswordValidationCallback .
* @ param credData The custom hashtable that will be used for login
* @ param securityName The user security name provided by the callback
* @ param isInvoked An Enum that represents whether either the CallerPrincipalCallback was invoked prior to this invocation .
* @ return boolean indicating whether the validation succeeded or failed */
private static boolean validateCallbackInformation ( Hashtable < String , Object > credData , String securityName , Invocation isInvoked ) { } } | boolean status = true ; if ( isInvoked == Invocation . CALLERPRINCIPALCALLBACK ) { String existingName = ( String ) credData . get ( AttributeNameConstants . WSCREDENTIAL_SECURITYNAME ) ; if ( existingName != null && ! ( existingName . equals ( securityName ) ) ) { status = false ; Tr . error ( tc , "CALLBACK_SECURITY_NAME_MISMATCH_J2CA0675" , new Object [ ] { securityName , existingName } ) ; } } return status ; |
public class ModelRegistryService { /** * Returns true if the class with the given class name contained in the given bundle is a model and false if not or
* the class couldn ' t be loaded . */
private boolean isModelClass ( String classname , Bundle bundle ) { } } | LOGGER . debug ( "Check if class '{}' is a model class" , classname ) ; Class < ? > clazz ; try { clazz = bundle . loadClass ( classname ) ; } catch ( ClassNotFoundException e ) { LOGGER . warn ( "Bundle could not load its own class: '{}' bundle: '{}'" , classname , bundle . getSymbolicName ( ) ) ; LOGGER . debug ( "Exact error: " , e ) ; return false ; } catch ( NoClassDefFoundError e ) { // ignore since this happens if bundle have optional imports
return false ; } catch ( Error e ) { // if this catch clause is reached , then the bundle which caused this error need to be checked . There
// is something wrong with the setup of the bundle ( e . g . double import of a library of different versions )
LOGGER . warn ( "Error while loading class: '{}' in bundle: '{}'" , classname , bundle . getSymbolicName ( ) ) ; LOGGER . debug ( "Exact error: " , e ) ; return false ; } return clazz . isAnnotationPresent ( Model . class ) ; |
public class Flowable { /** * Returns a Flowable that emits windows of items it collects from the source Publisher . The resulting
* Publisher emits connected , non - overlapping windows , each of a fixed duration specified by the
* { @ code timespan } argument or a maximum size specified by the { @ code count } argument ( whichever is reached
* first ) . When the source Publisher completes or encounters an error , the resulting Publisher emits the
* current window and propagates the notification from the source Publisher .
* < img width = " 640 " height = " 370 " src = " https : / / raw . github . com / wiki / ReactiveX / RxJava / images / rx - operators / window6 . s . png " alt = " " >
* < dl >
* < dt > < b > Backpressure : < / b > < / dt >
* < dd > The operator consumes the source { @ code Publisher } in an unbounded manner .
* The returned { @ code Publisher } doesn ' t support backpressure as it uses
* time to control the creation of windows . The returned inner { @ code Publisher } s honor
* backpressure and may hold up to { @ code count } elements at most . < / dd >
* < dt > < b > Scheduler : < / b > < / dt >
* < dd > You specify which { @ link Scheduler } this operator will use . < / dd >
* < / dl >
* @ param timespan
* the period of time each window collects items before it should be emitted and replaced with a
* new window
* @ param unit
* the unit of time which applies to the { @ code timespan } argument
* @ param count
* the maximum size of each window before it should be emitted
* @ param scheduler
* the { @ link Scheduler } to use when determining the end and start of a window
* @ param restart
* if true , when a window reaches the capacity limit , the timer is restarted as well
* @ param bufferSize
* the capacity hint for the buffer in the inner windows
* @ return a Flowable that emits connected , non - overlapping windows of items from the source Publisher
* that were emitted during a fixed duration of time or when the window has reached maximum capacity
* ( whichever occurs first )
* @ see < a href = " http : / / reactivex . io / documentation / operators / window . html " > ReactiveX operators documentation : Window < / a > */
@ CheckReturnValue @ BackpressureSupport ( BackpressureKind . ERROR ) @ SchedulerSupport ( SchedulerSupport . CUSTOM ) public final Flowable < Flowable < T > > window ( long timespan , TimeUnit unit , Scheduler scheduler , long count , boolean restart , int bufferSize ) { } } | ObjectHelper . verifyPositive ( bufferSize , "bufferSize" ) ; ObjectHelper . requireNonNull ( scheduler , "scheduler is null" ) ; ObjectHelper . requireNonNull ( unit , "unit is null" ) ; ObjectHelper . verifyPositive ( count , "count" ) ; return RxJavaPlugins . onAssembly ( new FlowableWindowTimed < T > ( this , timespan , timespan , unit , scheduler , count , bufferSize , restart ) ) ; |
public class CronCalendar { /** * Determines whether the given time ( in milliseconds ) is ' included ' by the
* < CODE > BaseCalendar < / CODE >
* @ param timeInMillis
* the date / time to test
* @ return a boolean indicating whether the specified time is ' included ' by
* the < CODE > CronCalendar < / CODE > */
@ Override public boolean isTimeIncluded ( final long timeInMillis ) { } } | if ( getBaseCalendar ( ) != null && ! getBaseCalendar ( ) . isTimeIncluded ( timeInMillis ) ) { return false ; } return ! m_aCronExpression . isSatisfiedBy ( new Date ( timeInMillis ) ) ; |
public class Engine { /** * This method is an < i > alias < / i > for the { @ link # evolve ( EvolutionStart ) }
* method .
* @ since 3.1 */
@ Override public EvolutionResult < G , C > apply ( final EvolutionStart < G , C > start ) { } } | return evolve ( start ) ; |
public class LTPAKeyFileUtilityImpl { /** * Write the LTPA key properties to the given OutputStream . This method
* will close the OutputStream .
* @ param keyImportFile The import file to be created
* @ param ltpaProps The properties containing the LTPA keys
* @ throws TokenException
* @ throws IOException */
protected void addLTPAKeysToFile ( OutputStream os , Properties ltpaProps ) throws Exception { } } | try { // Write the ltpa key propeperties to
ltpaProps . store ( os , null ) ; } catch ( IOException e ) { throw e ; } finally { if ( os != null ) try { os . close ( ) ; } catch ( IOException e ) { } } return ; |
public class CommerceUserSegmentEntryPersistenceImpl { /** * Removes the commerce user segment entry with the primary key from the database . Also notifies the appropriate model listeners .
* @ param primaryKey the primary key of the commerce user segment entry
* @ return the commerce user segment entry that was removed
* @ throws NoSuchUserSegmentEntryException if a commerce user segment entry with the primary key could not be found */
@ Override public CommerceUserSegmentEntry remove ( Serializable primaryKey ) throws NoSuchUserSegmentEntryException { } } | Session session = null ; try { session = openSession ( ) ; CommerceUserSegmentEntry commerceUserSegmentEntry = ( CommerceUserSegmentEntry ) session . get ( CommerceUserSegmentEntryImpl . class , primaryKey ) ; if ( commerceUserSegmentEntry == null ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } throw new NoSuchUserSegmentEntryException ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } return remove ( commerceUserSegmentEntry ) ; } catch ( NoSuchUserSegmentEntryException nsee ) { throw nsee ; } catch ( Exception e ) { throw processException ( e ) ; } finally { closeSession ( session ) ; } |
public class StringAttributeConstraintsTypeMarshaller { /** * Marshall the given parameter object . */
public void marshall ( StringAttributeConstraintsType stringAttributeConstraintsType , ProtocolMarshaller protocolMarshaller ) { } } | if ( stringAttributeConstraintsType == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( stringAttributeConstraintsType . getMinLength ( ) , MINLENGTH_BINDING ) ; protocolMarshaller . marshall ( stringAttributeConstraintsType . getMaxLength ( ) , MAXLENGTH_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class LogBuffer { /** * Return 48 - bit signed long from buffer . ( little - endian )
* @ see mysql - 5.1.60 / include / my _ global . h - sint6korr */
public final long getLong48 ( final int pos ) { } } | final int position = origin + pos ; if ( pos + 5 >= limit || pos < 0 ) throw new IllegalArgumentException ( "limit excceed: " + ( pos < 0 ? pos : ( pos + 5 ) ) ) ; byte [ ] buf = buffer ; return ( ( long ) ( 0xff & buf [ position ] ) ) | ( ( long ) ( 0xff & buf [ position + 1 ] ) << 8 ) | ( ( long ) ( 0xff & buf [ position + 2 ] ) << 16 ) | ( ( long ) ( 0xff & buf [ position + 3 ] ) << 24 ) | ( ( long ) ( 0xff & buf [ position + 4 ] ) << 32 ) | ( ( long ) ( buf [ position + 5 ] ) << 40 ) ; |
public class MetaBuilder { /** * 文档参考 :
* http : / / dev . mysql . com / doc / connector - j / en / connector - j - reference - type - conversions . html
* JDBC 与时间有关类型转换规则 , mysql 类型到 java 类型如下对应关系 :
* DATEjava . sql . Date
* DATETIMEjava . sql . Timestamp
* TIMESTAMP [ ( M ) ] java . sql . Timestamp
* TIMEjava . sql . Time
* 对数据库的 DATE 、 DATETIME 、 TIMESTAMP 、 TIME 四种类型注入 new java . util . Date ( ) 对象保存到库以后可以达到 “ 秒精度 ”
* 为了便捷性 , getter 、 setter 方法中对上述四种字段类型采用 java . util . Date , 可通过定制 TypeMapping 改变此映射规则 */
protected void buildColumnMetas ( TableMeta tableMeta ) throws SQLException { } } | String sql = dialect . forTableBuilderDoBuild ( tableMeta . name ) ; Statement stm = conn . createStatement ( ) ; ResultSet rs = stm . executeQuery ( sql ) ; ResultSetMetaData rsmd = rs . getMetaData ( ) ; int columnCount = rsmd . getColumnCount ( ) ; Map < String , ColumnMeta > columnMetaMap = new HashMap < > ( ) ; if ( generateRemarks ) { DatabaseMetaData dbMeta = conn . getMetaData ( ) ; ResultSet colMetaRs = null ; try { colMetaRs = dbMeta . getColumns ( null , null , tableMeta . name , null ) ; while ( colMetaRs . next ( ) ) { ColumnMeta columnMeta = new ColumnMeta ( ) ; columnMeta . name = colMetaRs . getString ( "COLUMN_NAME" ) ; columnMeta . remarks = colMetaRs . getString ( "REMARKS" ) ; columnMetaMap . put ( columnMeta . name , columnMeta ) ; } } catch ( Exception e ) { System . out . println ( "无法生成 REMARKS" ) ; } finally { if ( colMetaRs != null ) { colMetaRs . close ( ) ; } } } for ( int i = 1 ; i <= columnCount ; i ++ ) { ColumnMeta cm = new ColumnMeta ( ) ; cm . name = rsmd . getColumnName ( i ) ; String typeStr = null ; if ( dialect . isKeepByteAndShort ( ) ) { int type = rsmd . getColumnType ( i ) ; if ( type == Types . TINYINT ) { typeStr = "java.lang.Byte" ; } else if ( type == Types . SMALLINT ) { typeStr = "java.lang.Short" ; } } if ( typeStr == null ) { String colClassName = rsmd . getColumnClassName ( i ) ; typeStr = typeMapping . getType ( colClassName ) ; } if ( typeStr == null ) { int type = rsmd . getColumnType ( i ) ; if ( type == Types . BINARY || type == Types . VARBINARY || type == Types . LONGVARBINARY || type == Types . BLOB ) { typeStr = "byte[]" ; } else if ( type == Types . CLOB || type == Types . NCLOB ) { typeStr = "java.lang.String" ; } // 支持 oracle 的 TIMESTAMP 、 DATE 字段类型 , 其中 Types . DATE 值并不会出现
// 保留对 Types . DATE 的判断 , 一是为了逻辑上的正确性 、 完备性 , 二是其它类型的数据库可能用得着
else if ( type == Types . TIMESTAMP || type == Types . DATE ) { typeStr = "java.util.Date" ; } // 支持 PostgreSql 的 jsonb json
else if ( type == Types . OTHER ) { typeStr = "java.lang.Object" ; } else { typeStr = "java.lang.String" ; } } typeStr = handleJavaType ( typeStr , rsmd , i ) ; cm . javaType = typeStr ; // 构造字段对应的属性名 attrName
cm . attrName = buildAttrName ( cm . name ) ; // 备注字段赋值
if ( generateRemarks && columnMetaMap . containsKey ( cm . name ) ) { cm . remarks = columnMetaMap . get ( cm . name ) . remarks ; } tableMeta . columnMetas . add ( cm ) ; } rs . close ( ) ; stm . close ( ) ; |
public class CPDefinitionPersistenceImpl { /** * Removes the cp definition with the primary key from the database . Also notifies the appropriate model listeners .
* @ param primaryKey the primary key of the cp definition
* @ return the cp definition that was removed
* @ throws NoSuchCPDefinitionException if a cp definition with the primary key could not be found */
@ Override public CPDefinition remove ( Serializable primaryKey ) throws NoSuchCPDefinitionException { } } | Session session = null ; try { session = openSession ( ) ; CPDefinition cpDefinition = ( CPDefinition ) session . get ( CPDefinitionImpl . class , primaryKey ) ; if ( cpDefinition == null ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } throw new NoSuchCPDefinitionException ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } return remove ( cpDefinition ) ; } catch ( NoSuchCPDefinitionException nsee ) { throw nsee ; } catch ( Exception e ) { throw processException ( e ) ; } finally { closeSession ( session ) ; } |
public class Flowable { /** * Converts an arbitrary Reactive - Streams Publisher into a Flowable if not already a
* Flowable .
* The { @ link Publisher } must follow the
* < a href = " https : / / github . com / reactive - streams / reactive - streams - jvm # reactive - streams " > Reactive - Streams specification < / a > .
* Violating the specification may result in undefined behavior .
* If possible , use { @ link # create ( FlowableOnSubscribe , BackpressureStrategy ) } to create a
* source - like { @ code Flowable } instead .
* Note that even though { @ link Publisher } appears to be a functional interface , it
* is not recommended to implement it through a lambda as the specification requires
* state management that is not achievable with a stateless lambda .
* < dl >
* < dt > < b > Backpressure : < / b > < / dt >
* < dd > The operator is a pass - through for backpressure and its behavior is determined by the
* backpressure behavior of the wrapped publisher . < / dd >
* < dt > < b > Scheduler : < / b > < / dt >
* < dd > { @ code fromPublisher } does not operate by default on a particular { @ link Scheduler } . < / dd >
* < / dl >
* @ param < T > the value type of the flow
* @ param source the Publisher to convert
* @ return the new Flowable instance
* @ throws NullPointerException if the { @ code source } { @ code Publisher } is null
* @ see # create ( FlowableOnSubscribe , BackpressureStrategy ) */
@ CheckReturnValue @ BackpressureSupport ( BackpressureKind . PASS_THROUGH ) @ SchedulerSupport ( SchedulerSupport . NONE ) @ SuppressWarnings ( "unchecked" ) public static < T > Flowable < T > fromPublisher ( final Publisher < ? extends T > source ) { } } | if ( source instanceof Flowable ) { return RxJavaPlugins . onAssembly ( ( Flowable < T > ) source ) ; } ObjectHelper . requireNonNull ( source , "publisher is null" ) ; return RxJavaPlugins . onAssembly ( new FlowableFromPublisher < T > ( source ) ) ; |
public class GSCHImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public Object eGet ( int featureID , boolean resolve , boolean coreType ) { } } | switch ( featureID ) { case AfplibPackage . GSCH__HX : return getHX ( ) ; case AfplibPackage . GSCH__HY : return getHY ( ) ; } return super . eGet ( featureID , resolve , coreType ) ; |
public class StringPath { /** * Method to construct the less than or equals expression for string
* @ param value the string value
* @ return Expression */
public Expression < String > lte ( String value ) { } } | String valueString = "'" + value + "'" ; return new Expression < String > ( this , Operation . lte , valueString ) ; |
public class AvroUtils { /** * Returns either the default { @ link AvroUtils } which throw an exception in cases where Avro
* would be needed or loads the specific utils for Avro from flink - avro . */
public static AvroUtils getAvroUtils ( ) { } } | // try and load the special AvroUtils from the flink - avro package
try { Class < ? > clazz = Class . forName ( AVRO_KRYO_UTILS , false , Thread . currentThread ( ) . getContextClassLoader ( ) ) ; return clazz . asSubclass ( AvroUtils . class ) . getConstructor ( ) . newInstance ( ) ; } catch ( ClassNotFoundException e ) { // cannot find the utils , return the default implementation
return new DefaultAvroUtils ( ) ; } catch ( Exception e ) { throw new RuntimeException ( "Could not instantiate " + AVRO_KRYO_UTILS + "." , e ) ; } |
public class AggregatorImpl { /** * Returns an opened ServiceTracker for the Aggregator options . Aggregator options
* are created by the bundle activator and are shared by all Aggregator instances
* created from the same bundle .
* @ param bundleContext The contributing bundle context
* @ return The opened service tracker
* @ throws InvalidSyntaxException */
protected ServiceTracker < IOptions , ? > getOptionsServiceTracker ( BundleContext bundleContext ) throws InvalidSyntaxException { } } | ServiceTracker < IOptions , ? > tracker = new ServiceTracker < IOptions , Object > ( bundleContext , bundleContext . createFilter ( "(&(" + Constants . OBJECTCLASS + "=" + IOptions . class . getName ( ) + // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $
")(name=" + getServletBundleName ( ) + "))" ) , // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $
null ) ; tracker . open ( ) ; return tracker ; |
public class BaseTraceService { /** * Escape \ b , \ f , \ n , \ r , \ t , " , \ , / characters and appends to a string builder
* @ param sb String builder to append to
* @ param s String to escape */
private void jsonEscape ( StringBuilder sb , String s ) { } } | if ( s == null ) { sb . append ( s ) ; return ; } for ( int i = 0 ; i < s . length ( ) ; i ++ ) { char c = s . charAt ( i ) ; switch ( c ) { case '\b' : sb . append ( "\\b" ) ; break ; case '\f' : sb . append ( "\\f" ) ; break ; case '\n' : sb . append ( "\\n" ) ; break ; case '\r' : sb . append ( "\\r" ) ; break ; case '\t' : sb . append ( "\\t" ) ; break ; // Fall through because we just need to add \ ( escaped ) before the character
case '\\' : case '\"' : case '/' : sb . append ( "\\" ) ; sb . append ( c ) ; break ; default : sb . append ( c ) ; } } |
public class AbstractBcAsymmetricCipherFactory { /** * Helper function to create supported key size arrays .
* @ param minSize minimum size supported .
* @ param maxSize maximum size supported .
* @ param step intermediate step supported .
* @ return an array of sizes . */
protected static int [ ] newKeySizeArray ( int minSize , int maxSize , int step ) { } } | int [ ] result = new int [ ( ( maxSize - minSize ) / step ) + 1 ] ; for ( int i = minSize , j = 0 ; i <= maxSize ; i += step , j ++ ) { result [ j ] = i ; } return result ; |
public class TaintConfig { /** * Loads summaries from stream checking the format
* @ param input input stream of configured summaries
* @ param checkRewrite whether to check duplicit summaries
* @ throws IOException if cannot read the stream or the format is bad
* @ throws IllegalArgumentException for bad method format
* @ throws IllegalStateException if there are duplicit configurations */
public void load ( InputStream input , final boolean checkRewrite ) throws IOException { } } | new TaintConfigLoader ( ) . load ( input , new TaintConfigLoader . TaintConfigReceiver ( ) { @ Override public void receiveTaintConfig ( String typeSignature , String config ) throws IOException { if ( TaintMethodConfig . accepts ( typeSignature , config ) ) { if ( checkRewrite && containsKey ( typeSignature ) ) { throw new IllegalStateException ( "Config for " + typeSignature + " already loaded" ) ; } TaintMethodConfig taintMethodConfig = new TaintMethodConfig ( true ) . load ( config ) ; taintMethodConfig . setTypeSignature ( typeSignature ) ; put ( typeSignature , taintMethodConfig ) ; return ; } if ( TaintClassConfig . accepts ( typeSignature , config ) ) { if ( checkRewrite && taintClassConfigMap . containsKey ( typeSignature ) ) { throw new IllegalStateException ( "Config for " + typeSignature + " already loaded" ) ; } TaintClassConfig taintClassConfig = new TaintClassConfig ( ) . load ( config ) ; taintClassConfigMap . put ( typeSignature , taintClassConfig ) ; return ; } if ( TaintMethodConfigWithArgumentsAndLocation . accepts ( typeSignature , config ) ) { if ( checkRewrite && taintMethodConfigWithArgumentsAndLocationMap . containsKey ( typeSignature ) ) { throw new IllegalStateException ( "Config for " + typeSignature + " already loaded" ) ; } TaintMethodConfigWithArgumentsAndLocation methodConfig = new TaintMethodConfigWithArgumentsAndLocation ( ) . load ( config ) ; methodConfig . setTypeSignature ( typeSignature ) ; String key = typeSignature + '@' + methodConfig . getLocation ( ) ; taintMethodConfigWithArgumentsAndLocationMap . put ( key , methodConfig ) ; return ; } throw new IllegalArgumentException ( "Invalid full method name " + typeSignature + " configured" ) ; } } ) ; |
public class UpdateNFSFileShareRequest { /** * The list of clients that are allowed to access the file gateway . The list must contain either valid IP addresses
* or valid CIDR blocks .
* @ return The list of clients that are allowed to access the file gateway . The list must contain either valid IP
* addresses or valid CIDR blocks . */
public java . util . List < String > getClientList ( ) { } } | if ( clientList == null ) { clientList = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return clientList ; |
public class ThreadDumps { /** * Prints the { @ link ThreadInfo } ( because { @ link ThreadInfo # toString ( ) } caps out the stack trace at 8 frames )
* @ param writer the writer to print to .
* @ param t the thread to print
* @ param mbean the { @ link ThreadMXBean } to use . */
@ edu . umd . cs . findbugs . annotations . SuppressWarnings ( value = { } } | "VA_FORMAT_STRING_USES_NEWLINE" } , justification = "We don't want platform specific" ) public static void printThreadInfo ( PrintWriter writer , ThreadInfo t , ThreadMXBean mbean ) { long cpuPercentage ; try { long cpuTime = mbean . getThreadCpuTime ( t . getThreadId ( ) ) ; long threadUserTime = mbean . getThreadUserTime ( t . getThreadId ( ) ) ; cpuPercentage = ( cpuTime == 0 ) ? 0 : 100 * threadUserTime / cpuTime ; } catch ( UnsupportedOperationException x ) { SupportLogFormatter . printStackTrace ( x , writer ) ; cpuPercentage = 0 ; } writer . printf ( "\"%s\" id=%d (0x%x) state=%s cpu=%d%%" , t . getThreadName ( ) , t . getThreadId ( ) , t . getThreadId ( ) , t . getThreadState ( ) , cpuPercentage ) ; final LockInfo lock = t . getLockInfo ( ) ; if ( lock != null && t . getThreadState ( ) != Thread . State . BLOCKED ) { writer . printf ( "\n - waiting on <0x%08x> (a %s)" , lock . getIdentityHashCode ( ) , lock . getClassName ( ) ) ; writer . printf ( "\n - locked <0x%08x> (a %s)" , lock . getIdentityHashCode ( ) , lock . getClassName ( ) ) ; } else if ( lock != null && t . getThreadState ( ) == Thread . State . BLOCKED ) { writer . printf ( "\n - waiting to lock <0x%08x> (a %s)" , lock . getIdentityHashCode ( ) , lock . getClassName ( ) ) ; } if ( t . isSuspended ( ) ) { writer . print ( " (suspended)" ) ; } if ( t . isInNative ( ) ) { writer . print ( " (running in native)" ) ; } writer . println ( ) ; if ( t . getLockOwnerName ( ) != null ) { writer . printf ( " owned by \"%s\" id=%d (0x%x)\n" , t . getLockOwnerName ( ) , t . getLockOwnerId ( ) , t . getLockOwnerId ( ) ) ; } final StackTraceElement [ ] elements = t . getStackTrace ( ) ; final MonitorInfo [ ] monitors = t . getLockedMonitors ( ) ; for ( int i = 0 ; i < elements . length ; i ++ ) { final StackTraceElement element = elements [ i ] ; writer . printf ( " at %s\n" , element ) ; for ( int j = 1 ; j < monitors . 
length ; j ++ ) { final MonitorInfo monitor = monitors [ j ] ; if ( monitor . getLockedStackDepth ( ) == i ) { writer . printf ( " - locked %s\n" , monitor ) ; } } } writer . println ( ) ; final LockInfo [ ] locks = t . getLockedSynchronizers ( ) ; if ( locks . length > 0 ) { writer . printf ( " Locked synchronizers: count = %d\n" , locks . length ) ; for ( LockInfo l : locks ) { writer . printf ( " - %s\n" , l ) ; } writer . println ( ) ; } |
public class RawScale2x { /** * Process filter .
* @ param srcImage The image source .
* @ param dstImage The image destination .
* @ param x The location x .
* @ param y The location y . */
private void process ( int [ ] srcImage , int [ ] dstImage , int x , int y ) { } } | final int b = getSourcePixel ( srcImage , x , y - 1 ) ; final int d = getSourcePixel ( srcImage , x - 1 , y ) ; final int e = getSourcePixel ( srcImage , x , y ) ; final int f = getSourcePixel ( srcImage , x + 1 , y ) ; final int h = getSourcePixel ( srcImage , x , y + 1 ) ; int e0 = e ; int e1 = e ; int e2 = e ; int e3 = e ; if ( b != h && d != f ) { e0 = d == b ? d : e ; e1 = b == f ? f : e ; e2 = d == h ? d : e ; e3 = h == f ? f : e ; } setDestPixel ( dstImage , x * SCALE , y * SCALE , e0 ) ; setDestPixel ( dstImage , x * SCALE + 1 , y * SCALE , e1 ) ; setDestPixel ( dstImage , x * SCALE , y * SCALE + 1 , e2 ) ; setDestPixel ( dstImage , x * SCALE + 1 , y * SCALE + 1 , e3 ) ; |
public class CheckAccessControls { /** * Returns the type that best represents the instance type for { @ code type } .
* < ul >
* < li > Prototype type = > The instance type having that prototype
* < li > Instance type = > The type
* < li > Constructor type = > The type that constructor instantiates
* < li > Object - literal type = > The type
* < / ul > */
@ Nullable private static ObjectType instanceTypeFor ( JSType type ) { } } | if ( type == null ) { return null ; } else if ( type . isUnionType ( ) ) { return null ; // A union has no meaningful instance type .
} else if ( type . isInstanceType ( ) || type . isUnknownType ( ) ) { return type . toMaybeObjectType ( ) ; } else if ( type . isConstructor ( ) || type . isInterface ( ) ) { return type . toMaybeFunctionType ( ) . getInstanceType ( ) ; } else if ( type . isFunctionType ( ) ) { return null ; // Functions that aren ' t ctors or interfaces have no instance type .
} else if ( type . isFunctionPrototypeType ( ) ) { return instanceTypeFor ( type . toMaybeObjectType ( ) . getOwnerFunction ( ) ) ; } return type . toMaybeObjectType ( ) ; |
public class FacesImpl { /** * Identify unknown faces from a person group .
* @ param personGroupId PersonGroupId of the target person group , created by PersonGroups . Create
* @ param faceIds Array of query faces faceIds , created by the Face - Detect . Each of the faces are identified independently . The valid number of faceIds is between [ 1 , 10 ] .
* @ param maxNumOfCandidatesReturned The range of maxNumOfCandidatesReturned is between 1 and 5 ( default is 1 ) .
* @ param confidenceThreshold Confidence threshold of identification , used to judge whether one face belong to one person . The range of confidenceThreshold is [ 0 , 1 ] ( default specified by algorithm ) .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the List & lt ; IdentifyResult & gt ; object */
public Observable < ServiceResponse < List < IdentifyResult > > > identifyWithServiceResponseAsync ( String personGroupId , List < UUID > faceIds , Integer maxNumOfCandidatesReturned , Double confidenceThreshold ) { } } | if ( this . client . azureRegion ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.azureRegion() is required and cannot be null." ) ; } if ( personGroupId == null ) { throw new IllegalArgumentException ( "Parameter personGroupId is required and cannot be null." ) ; } if ( faceIds == null ) { throw new IllegalArgumentException ( "Parameter faceIds is required and cannot be null." ) ; } Validator . validate ( faceIds ) ; IdentifyRequest bodyParameter = new IdentifyRequest ( ) ; bodyParameter . withPersonGroupId ( personGroupId ) ; bodyParameter . withFaceIds ( faceIds ) ; bodyParameter . withMaxNumOfCandidatesReturned ( maxNumOfCandidatesReturned ) ; bodyParameter . withConfidenceThreshold ( confidenceThreshold ) ; String parameterizedHost = Joiner . on ( ", " ) . join ( "{AzureRegion}" , this . client . azureRegion ( ) ) ; return service . identify ( this . client . acceptLanguage ( ) , bodyParameter , parameterizedHost , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < List < IdentifyResult > > > > ( ) { @ Override public Observable < ServiceResponse < List < IdentifyResult > > > call ( Response < ResponseBody > response ) { try { ServiceResponse < List < IdentifyResult > > clientResponse = identifyDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ; |
public class ClassDescriptor { /** * adds a FIELDDESCRIPTOR to this ClassDescriptor .
* @ param fld */
public void addFieldDescriptor ( FieldDescriptor fld ) { } } | fld . setClassDescriptor ( this ) ; // BRJ
if ( m_FieldDescriptions == null ) { m_FieldDescriptions = new FieldDescriptor [ 1 ] ; m_FieldDescriptions [ 0 ] = fld ; } else { int size = m_FieldDescriptions . length ; FieldDescriptor [ ] tmpArray = new FieldDescriptor [ size + 1 ] ; System . arraycopy ( m_FieldDescriptions , 0 , tmpArray , 0 , size ) ; tmpArray [ size ] = fld ; m_FieldDescriptions = tmpArray ; // 2 . Sort fields according to their getOrder ( ) Property
Arrays . sort ( m_FieldDescriptions , FieldDescriptor . getComparator ( ) ) ; } m_fieldDescriptorNameMap = null ; m_PkFieldDescriptors = null ; m_nonPkFieldDescriptors = null ; m_lockingFieldDescriptors = null ; m_RwFieldDescriptors = null ; m_RwNonPkFieldDescriptors = null ; |
public class xen_upgrade { /** * < pre >
* Converts API response of bulk operation into object and returns the object array in case of get request .
* < / pre > */
protected base_resource [ ] get_nitro_bulk_response ( nitro_service service , String response ) throws Exception { } } | xen_upgrade_responses result = ( xen_upgrade_responses ) service . get_payload_formatter ( ) . string_to_resource ( xen_upgrade_responses . class , response ) ; if ( result . errorcode != 0 ) { if ( result . errorcode == SESSION_NOT_EXISTS ) service . clear_session ( ) ; throw new nitro_exception ( result . message , result . errorcode , ( base_response [ ] ) result . xen_upgrade_response_array ) ; } xen_upgrade [ ] result_xen_upgrade = new xen_upgrade [ result . xen_upgrade_response_array . length ] ; for ( int i = 0 ; i < result . xen_upgrade_response_array . length ; i ++ ) { result_xen_upgrade [ i ] = result . xen_upgrade_response_array [ i ] . xen_upgrade [ 0 ] ; } return result_xen_upgrade ; |
public class FavoritesEditController { /** * Handles all Favorites portlet EDIT mode renders . Populates model with user ' s favorites and
* selects a view to display those favorites .
* < p > View selection :
* < p > Returns " jsp / Favorites / edit " in the normal case where the user has at least one favorited
* portlet or favorited collection .
* < p > Returns " jsp / Favorites / edit _ zero " in the edge case where the user has zero favorited
* portlets AND zero favorited collections .
* < p > Model : marketPlaceFname - - > String functional name of Marketplace portlet , or null if not
* available . collections - - > List of favorited collections ( IUserLayoutNodeDescription s )
* favorites - - > List of favorited individual portlets ( IUserLayoutNodeDescription s )
* successMessageCode - - > String success message bundle key , or null if none errorMessageCode
* - - > String error message bundle key , or null if none nameOfFavoriteActedUpon - - > Name of
* favorite acted upon , intended as parameter to success or error message
* @ param model . Spring model . This method adds five model attributes .
* @ return jsp / Favorites / edit [ _ zero ] */
@ RenderMapping public String initializeView ( Model model , RenderRequest renderRequest ) { } } | IUserInstance ui = userInstanceManager . getUserInstance ( portalRequestUtils . getCurrentPortalRequest ( ) ) ; UserPreferencesManager upm = ( UserPreferencesManager ) ui . getPreferencesManager ( ) ; IUserLayoutManager ulm = upm . getUserLayoutManager ( ) ; IUserLayout userLayout = ulm . getUserLayout ( ) ; // TODO : the portlet could predicate including a non - null marketplace portlet fname
// on the accessing user having permission to render the portlet referenced by that fname
// so that portlet would gracefully degrade when configured with bad marketplace portlet
// fname
// and also gracefully degrade when the accessing user doesn ' t have permission to access an
// otherwise
// viable configured marketplace . This complexity may not be worth it . Anyway it is not
// yet implemented .
model . addAttribute ( "marketplaceFname" , this . marketplaceFName ) ; List < IUserLayoutNodeDescription > collections = favoritesUtils . getFavoriteCollections ( userLayout ) ; model . addAttribute ( "collections" , collections ) ; List < IUserLayoutNodeDescription > favorites = favoritesUtils . getFavoritePortletLayoutNodes ( userLayout ) ; model . addAttribute ( "favorites" , favorites ) ; model . addAttribute ( "successMessageCode" , renderRequest . getParameter ( "successMessageCode" ) ) ; model . addAttribute ( "errorMessageCode" , renderRequest . getParameter ( "errorMessageCode" ) ) ; model . addAttribute ( "nameOfFavoriteActedUpon" , renderRequest . getParameter ( "nameOfFavoriteActedUpon" ) ) ; // default to the regular old edit view
String viewName = "jsp/Favorites/edit" ; if ( collections . isEmpty ( ) && favorites . isEmpty ( ) ) { // use the special case view
viewName = "jsp/Favorites/edit_zero" ; } logger . trace ( "Favorites Portlet EDIT mode built model [{}] and selected view {}." , model , viewName ) ; return viewName ; |
public class ControlBeanContextServicesSupport { /** * Serialize this instance including any serializable services and BeanContextServicesListeners .
* Any services or listeners which are not Serializable will not be present once deserialized .
* @ param oos
* @ throws IOException */
private synchronized void writeObject ( ObjectOutputStream oos ) throws IOException { } } | int serializable = 0 ; oos . defaultWriteObject ( ) ; // write out the service providers
Set < Map . Entry < Class , ServiceProvider > > providers = _serviceProviders . entrySet ( ) ; for ( Map . Entry < Class , ServiceProvider > provider : providers ) { if ( provider . getValue ( ) . isSerializable ( ) ) { serializable ++ ; } } oos . writeInt ( serializable ) ; if ( serializable > 0 ) { for ( Map . Entry < Class , ServiceProvider > entry : providers ) { if ( entry . getValue ( ) . isSerializable ( ) ) { oos . writeObject ( entry . getKey ( ) ) ; oos . writeObject ( entry . getValue ( ) ) ; } } } // write out the event listeners
serializable = 0 ; for ( BeanContextServicesListener l : _bcsListeners ) { if ( l instanceof Serializable ) { serializable ++ ; } } oos . writeInt ( serializable ) ; if ( serializable > 0 ) { for ( BeanContextServicesListener l : _bcsListeners ) { if ( l instanceof Serializable ) { oos . writeObject ( l ) ; } } } |
public class SipApplicationSessionImpl { /** * ( non - Javadoc )
* @ see javax . servlet . sip . SipApplicationSession # getSessions ( ) */
@ SuppressWarnings ( value = "unchecked" ) public Iterator < ? > getSessions ( ) { } } | if ( ! isValid ( ) ) { throw new IllegalStateException ( "SipApplicationSession already invalidated !" ) ; } Set < MobicentsSipSession > sipSessions = getSipSessions ( false ) ; Set < HttpSession > httpSessions = getHttpSessions ( ) ; Set protocolSessions = new HashSet ( ) ; protocolSessions . addAll ( httpSessions ) ; protocolSessions . addAll ( sipSessions ) ; return protocolSessions . iterator ( ) ; |
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link AbstractTimeComplexType } { @ code > }
* @ param value
* Java instance representing xml element ' s value .
* @ return
* the new instance of { @ link JAXBElement } { @ code < } { @ link AbstractTimeComplexType } { @ code > } */
@ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "_TimeComplex" , substitutionHeadNamespace = "http://www.opengis.net/gml" , substitutionHeadName = "_TimeObject" ) public JAXBElement < AbstractTimeComplexType > create_TimeComplex ( AbstractTimeComplexType value ) { } } | return new JAXBElement < AbstractTimeComplexType > ( __TimeComplex_QNAME , AbstractTimeComplexType . class , null , value ) ; |
public class HTMLGen { /** * - - - - - HTML Version Specific - - - - - */
public HTMLGen imports ( Imports imports ) { } } | // this . imports = imports ;
for ( String str : imports . css ) { forward . print ( "<link rel=\"stylesheet\" href=\"" ) ; forward . print ( imports . themePath ( null ) ) ; forward . print ( str ) ; forward . println ( "\">" ) ; } for ( String str : imports . js ) { forward . print ( "<script type=\"text/javascript\" src=\"" ) ; forward . print ( imports . themePath ( null ) ) ; forward . print ( str ) ; forward . println ( "\"></script>" ) ; } return this ; |
public class StatementServiceImp { /** * ( non - Javadoc )
* @ see com . popbill . api . StatementService # getPopUpURL ( java . lang . String , int , java . lang . String ) */
@ Override public String getPopUpURL ( String CorpNum , int ItemCode , String MgtKey ) throws PopbillException { } } | if ( MgtKey == null || MgtKey . isEmpty ( ) ) throw new PopbillException ( - 99999999 , "관리번호 가 입력되지 않았습니다." ) ; return getPopUpURL ( CorpNum , ItemCode , MgtKey , null ) ; |
public class SortaServiceImpl { /** * A helper function to check if the ontology term ( OT ) contains the ontology annotations provided
* in input . If the OT has the same annotation , the OT will be considered as a good match and the
* similarity scores 100 are allocated to the OT */
private Entity calculateNGromOTAnnotations ( Entity inputEntity , Entity ontologyTermEntity ) { } } | OntologyTermHitEntity mapEntity = new OntologyTermHitEntity ( ontologyTermEntity , ontologyTermHitMetaData ) ; for ( Entity annotationEntity : ontologyTermEntity . getEntities ( OntologyTermMetadata . ONTOLOGY_TERM_DYNAMIC_ANNOTATION ) ) { String annotationName = annotationEntity . getString ( OntologyTermDynamicAnnotationMetadata . NAME ) ; String annotationValue = annotationEntity . getString ( OntologyTermDynamicAnnotationMetadata . VALUE ) ; for ( String attributeName : inputEntity . getAttributeNames ( ) ) { if ( StringUtils . isNotEmpty ( inputEntity . getString ( attributeName ) ) && StringUtils . equalsIgnoreCase ( attributeName , annotationName ) && StringUtils . equalsIgnoreCase ( inputEntity . getString ( attributeName ) , annotationValue ) ) { mapEntity . set ( SCORE , 100d ) ; mapEntity . set ( COMBINED_SCORE , 100d ) ; return mapEntity ; } } } return mapEntity ; |
public class ComposedCharIter { /** * Returns the next precomposed Unicode character .
* Repeated calls to < tt > next < / tt > return all of the precomposed characters defined
* by Unicode , in ascending order . After all precomposed characters have
* been returned , { @ link # hasNext } will return < tt > false < / tt > and further calls
* to < tt > next < / tt > will return { @ link # DONE } .
* @ deprecated ICU 2.2 */
@ Deprecated public char next ( ) { } } | if ( nextChar == Normalizer . DONE ) { findNextChar ( ) ; } curChar = nextChar ; nextChar = Normalizer . DONE ; return ( char ) curChar ; |
public class TempFileProvider { /** * Create a temporary file provider for a given type .
* @ param providerType The provider type string ( used as a prefix in the temp file dir name )
* @ param executor Executor which will be used to manage temp file provider tasks ( like cleaning up / deleting the temp files when needed )
* @ param cleanExisting If this is true , then this method will * try * to delete the existing temp content ( if any ) for the < code > providerType < / code > . The attempt to delete the existing content ( if any )
* will be done in the background and this method will not wait for the deletion to complete . The method will immediately return back with a usable { @ link TempFileProvider } . Note that the
* < code > cleanExisting < / code > will just act as a hint for this method to trigger the deletion of existing content . The method may not always be able to delete the existing contents .
* @ return The new provider
* @ throws IOException if an I / O error occurs */
public static TempFileProvider create ( final String providerType , final ScheduledExecutorService executor , final boolean cleanExisting ) throws IOException { } } | if ( cleanExisting ) { try { // The " clean existing " logic is as follows :
// 1 ) Rename the root directory " foo " corresponding to the provider type to " bar "
// 2 ) Submit a task to delete " bar " and its contents , in a background thread , to the the passed executor .
// 3 ) Create a " foo " root directory for the provider type and return that TempFileProvider ( while at the same time the background task is in progress )
// This ensures that the " foo " root directory for the providerType is empty and the older content is being cleaned up in the background ( without affecting the current processing ) ,
// thus simulating a " cleanup existing content "
final File possiblyExistingProviderRoot = new File ( TMP_ROOT , providerType ) ; if ( possiblyExistingProviderRoot . exists ( ) ) { // rename it so that it can be deleted as a separate ( background ) task
final File toBeDeletedProviderRoot = new File ( TMP_ROOT , createTempName ( providerType + "-to-be-deleted-" , "" ) ) ; final boolean renamed = possiblyExistingProviderRoot . renameTo ( toBeDeletedProviderRoot ) ; if ( ! renamed ) { throw new IOException ( "Failed to rename " + possiblyExistingProviderRoot . getAbsolutePath ( ) + " to " + toBeDeletedProviderRoot . getAbsolutePath ( ) ) ; } else { // delete in the background
executor . submit ( new DeleteTask ( toBeDeletedProviderRoot , executor ) ) ; } } } catch ( Throwable t ) { // just log a message if existing contents couldn ' t be deleted
VFSLogger . ROOT_LOGGER . failedToCleanExistingContentForTempFileProvider ( providerType ) ; // log the cause of the failure
VFSLogger . ROOT_LOGGER . debug ( "Failed to clean existing content for temp file provider of type " + providerType , t ) ; } } // now create and return the TempFileProvider for the providerType
final File providerRoot = new File ( TMP_ROOT , providerType ) ; return new TempFileProvider ( createTempDir ( providerType , "" , providerRoot ) , executor ) ; |
public class SynchroLogger { /** * Log a string .
* @ param label label text
* @ param data string data */
public static void log ( String label , String data ) { } } | if ( LOG != null ) { LOG . write ( label ) ; LOG . write ( ": " ) ; LOG . println ( data ) ; LOG . flush ( ) ; } |
public class TileCache { /** * Updates the tile cache by rebuilding tiles touched by unfinished obstacle requests .
* @ return Returns true if the tile cache is fully up to date with obstacle requests and tile rebuilds . If the tile
* cache is up to date another ( immediate ) call to update will have no effect ; otherwise another call will
* continue processing obstacle requests and tile rebuilds . */
public boolean update ( ) { } } | if ( m_update . isEmpty ( ) ) { // Process requests .
for ( ObstacleRequest req : m_reqs ) { int idx = decodeObstacleIdObstacle ( req . ref ) ; if ( idx >= m_obstacles . size ( ) ) { continue ; } TileCacheObstacle ob = m_obstacles . get ( idx ) ; int salt = decodeObstacleIdSalt ( req . ref ) ; if ( ob . salt != salt ) { continue ; } if ( req . action == ObstacleRequestAction . REQUEST_ADD ) { // Find touched tiles .
float [ ] bmin = new float [ 3 ] ; float [ ] bmax = new float [ 3 ] ; getObstacleBounds ( ob , bmin , bmax ) ; ob . touched = queryTiles ( bmin , bmax ) ; // Add tiles to update list .
ob . pending . clear ( ) ; for ( long j : ob . touched ) { if ( ! contains ( m_update , j ) ) { m_update . add ( j ) ; } ob . pending . add ( j ) ; } } else if ( req . action == ObstacleRequestAction . REQUEST_REMOVE ) { // Prepare to remove obstacle .
ob . state = ObstacleState . DT_OBSTACLE_REMOVING ; // Add tiles to update list .
ob . pending . clear ( ) ; for ( long j : ob . touched ) { if ( ! contains ( m_update , j ) ) { m_update . add ( j ) ; } ob . pending . add ( j ) ; } } } m_reqs . clear ( ) ; } // Process updates
if ( ! m_update . isEmpty ( ) ) { long ref = m_update . remove ( 0 ) ; // Build mesh
buildNavMeshTile ( ref ) ; // Update obstacle states .
for ( int i = 0 ; i < m_obstacles . size ( ) ; ++ i ) { TileCacheObstacle ob = m_obstacles . get ( i ) ; if ( ob . state == ObstacleState . DT_OBSTACLE_PROCESSING || ob . state == ObstacleState . DT_OBSTACLE_REMOVING ) { // Remove handled tile from pending list .
ob . pending . remove ( ref ) ; // If all pending tiles processed , change state .
if ( ob . pending . isEmpty ( ) ) { if ( ob . state == ObstacleState . DT_OBSTACLE_PROCESSING ) { ob . state = ObstacleState . DT_OBSTACLE_PROCESSED ; } else if ( ob . state == ObstacleState . DT_OBSTACLE_REMOVING ) { ob . state = ObstacleState . DT_OBSTACLE_EMPTY ; // Update salt , salt should never be zero .
ob . salt = ( ob . salt + 1 ) & ( ( 1 << 16 ) - 1 ) ; if ( ob . salt == 0 ) { ob . salt ++ ; } // Return obstacle to free list .
ob . next = m_nextFreeObstacle ; m_nextFreeObstacle = ob ; } } } } } return m_update . isEmpty ( ) && m_reqs . isEmpty ( ) ; |
public class DurianPlugins { /** * Returns an implementation of the given class using the system properties as a registry . */
private static Object getPluginImplementationViaProperty ( Class < ? > pluginClass ) { } } | String className = pluginClass . getCanonicalName ( ) ; if ( className == null ) { throw new IllegalArgumentException ( "Class " + pluginClass + " does not have a canonical name!" ) ; } // Check system properties for plugin class .
// This will only happen during system startup thus it ' s okay to use the synchronized
// System . getProperties as it will never get called in normal operations .
String implementingClass = System . getProperty ( PROPERTY_PREFIX + className ) ; if ( implementingClass != null ) { try { Class < ? > cls = Class . forName ( implementingClass ) ; // narrow the scope ( cast ) to the type we ' re expecting
cls = cls . asSubclass ( pluginClass ) ; return cls . newInstance ( ) ; } catch ( ClassCastException e ) { throw new RuntimeException ( className + " implementation is not an instance of " + className + ": " + implementingClass ) ; } catch ( ClassNotFoundException e ) { throw new RuntimeException ( className + " implementation class not found: " + implementingClass , e ) ; } catch ( InstantiationException e ) { throw new RuntimeException ( className + " implementation not able to be instantiated: " + implementingClass , e ) ; } catch ( IllegalAccessException e ) { throw new RuntimeException ( className + " implementation not able to be accessed: " + implementingClass , e ) ; } } else { return null ; } |
public class HttpBase { /** * 移除一个头信息
* @ param name Header名
* @ return this */
public T removeHeader ( String name ) { } } | if ( name != null ) { headers . remove ( name . trim ( ) ) ; } return ( T ) this ; |
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EClass getIfcParameterValue ( ) { } } | if ( ifcParameterValueEClass == null ) { ifcParameterValueEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 849 ) ; } return ifcParameterValueEClass ; |
public class lbmonbindings_service_binding { /** * Use this API to fetch filtered set of lbmonbindings _ service _ binding resources .
* filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */
public static lbmonbindings_service_binding [ ] get_filtered ( nitro_service service , String monitorname , String filter ) throws Exception { } } | lbmonbindings_service_binding obj = new lbmonbindings_service_binding ( ) ; obj . set_monitorname ( monitorname ) ; options option = new options ( ) ; option . set_filter ( filter ) ; lbmonbindings_service_binding [ ] response = ( lbmonbindings_service_binding [ ] ) obj . getfiltered ( service , option ) ; return response ; |
public class PrivateTaskScheduler { /** * Free / Kill this task . */
public void free ( ) { } } | if ( m_bKeepAlive ) { m_bKeepAlive = false ; // Don ' t stay alive
m_bThreadSuspended = false ; // Unsuspend this puppy
try { this . notify ( ) ; // If you ' re waiting , continue ( and the thread will die )
} catch ( IllegalMonitorStateException ex ) { // Ignore
} } super . free ( ) ; |
public class LaTeXMLConverter { /** * Parses latex to MathML string via LaTeXML ( locally )
* @ param arguments
* @ param latex
* @ return */
public String convertToString ( List < String > arguments , String latex ) { } } | NativeResponse response = parseToNativeResponse ( arguments , latex ) ; if ( handleResponseCode ( response , NAME , LOG ) != 0 ) { return null ; } LOG . info ( NAME + " conversion successful." ) ; return response . getResult ( ) ; |
public class CsvFileDownloader {
	/**
	 * Provide iterator via OpenCSV's CSVReader.
	 * Provides a way to skip top rows by providing regex. (This is useful when CSV file comes with
	 * comments on top rows, but not in fixed size.)
	 * It also provides validation on schema by matching header names between property's schema and
	 * header name in CSV file.
	 * {@inheritDoc}
	 * @see org.apache.gobblin.source.extractor.filebased.FileDownloader#downloadFile(java.lang.String)
	 */
	@SuppressWarnings("unchecked")
	@Override
	public Iterator<String[]> downloadFile(String file) throws IOException {
		log.info("Beginning to download file: " + file);
		final State state = fileBasedExtractor.workUnitState;
		CSVReader reader;
		try {
			// Build the reader with an explicit single-character delimiter when configured,
			// otherwise use the CSVReader default. Either way the reader is registered with the
			// extractor's Closer so it is closed together with the extractor.
			if (state.contains(DELIMITER)) {
				String delimiterStr = state.getProp(DELIMITER).trim();
				Preconditions.checkArgument(delimiterStr.length() == 1, "Delimiter should be a character.");
				char delimiter = delimiterStr.charAt(0);
				log.info("Using " + delimiter + " as a delimiter.");
				reader = this.fileBasedExtractor.getCloser().register(new CSVReader(new InputStreamReader(this.fileBasedExtractor.getFsHelper().getFileStream(file), ConfigurationKeys.DEFAULT_CHARSET_ENCODING), delimiter));
			} else {
				reader = this.fileBasedExtractor.getCloser().register(new CSVReader(new InputStreamReader(this.fileBasedExtractor.getFsHelper().getFileStream(file), ConfigurationKeys.DEFAULT_CHARSET_ENCODING)));
			}
		} catch (FileBasedHelperException e) {
			throw new IOException(e);
		}
		PeekingIterator<String[]> iterator = Iterators.peekingIterator(reader.iterator());
		// Skip leading rows whose first column matches the configured regex (e.g. comment rows);
		// stop at the first non-matching (or empty) row.
		if (state.contains(SKIP_TOP_ROWS_REGEX)) {
			String regex = state.getProp(SKIP_TOP_ROWS_REGEX);
			log.info("Trying to skip with regex: " + regex);
			while (iterator.hasNext()) {
				String[] row = iterator.peek();
				if (row.length == 0) {
					break;
				}
				if (!row[0].matches(regex)) {
					break;
				}
				iterator.next();
			}
		}
		// Optionally drop the (remaining) first record, typically a header row.
		if (this.fileBasedExtractor.isShouldSkipFirstRecord() && iterator.hasNext()) {
			log.info("Skipping first record");
			iterator.next();
		}
		return iterator;
	}
}
public class JcrServiceImpl { /** * Picks access entry for the given principal .
* @ param acl
* @ param principal
* @ return the ACL entry
* @ throws RepositoryException */
private AccessControlEntry pick ( AccessControlList acl , String principal ) throws RepositoryException { } } | for ( AccessControlEntry entry : acl . getAccessControlEntries ( ) ) { if ( entry . getPrincipal ( ) . getName ( ) . equals ( principal ) ) { return entry ; } } return null ; |
public class BrushedMetalFilter {
	/**
	 * Return a mod b. This differs from the % operator with respect to negative numbers:
	 * for a positive divisor the result is always in the range [0, b).
	 *
	 * @param a the dividend
	 * @param B the divisor
	 * @return a mod b
	 */
	private static int mod(int a, final int B) {
		// Java's % may yield a negative remainder; shift it back into range when it does.
		final int r = a % B;
		return (r < 0) ? r + B : r;
	}
}
public class ShReplicatedSessionDataFactory { /** * ( non - Javadoc )
* @ see org . jdiameter . common . api . app . IAppSessionDataFactory # getAppSessionData ( java . lang . Class , java . lang . String ) */
@ Override public IShSessionData getAppSessionData ( Class < ? extends AppSession > clazz , String sessionId ) { } } | if ( clazz . equals ( ClientShSession . class ) ) { ShClientSessionDataReplicatedImpl data = new ShClientSessionDataReplicatedImpl ( sessionId , this . mobicentsCluster ) ; return data ; } else if ( clazz . equals ( ServerShSession . class ) ) { ShServerSessionDataReplicatedImpl data = new ShServerSessionDataReplicatedImpl ( sessionId , this . mobicentsCluster ) ; return data ; } throw new IllegalArgumentException ( ) ; |
public class DocumentTemplateRepository { /** * region > validate . . . */
@ Programmatic public TranslatableString validateApplicationTenancyAndDate ( final DocumentType proposedType , final String proposedAtPath , final LocalDate proposedDate , final DocumentTemplate ignore ) { } } | final List < DocumentTemplate > existingTemplates = findByTypeAndAtPath ( proposedType , proposedAtPath ) ; for ( DocumentTemplate existingTemplate : existingTemplates ) { if ( existingTemplate == ignore ) { continue ; } if ( java . util . Objects . equals ( existingTemplate . getDate ( ) , proposedDate ) ) { return TranslatableString . tr ( "A template already exists for this date" ) ; } if ( proposedDate == null && existingTemplate . getDate ( ) != null ) { return TranslatableString . tr ( "Must provide a date (there are existing templates that already have a date specified)" ) ; } } return null ; |
public class ClassUtil {
	/**
	 * Gets the {@link URL} corresponding to a resource's relative path.
	 *
	 * @param resource the resource's relative path
	 * @param baseClass the base class; the relative path is resolved against this class's location,
	 *                  or against the classpath root when {@code null}
	 * @return the resource {@link URL}
	 * @see ResourceUtil#getResource(String, Class)
	 */
	public static URL getResourceUrl(String resource, Class<?> baseClass) {
		return ResourceUtil.getResource(resource, baseClass);
	}
}
public class DescribeGatewayInformationRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DescribeGatewayInformationRequest describeGatewayInformationRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( describeGatewayInformationRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describeGatewayInformationRequest . getGatewayARN ( ) , GATEWAYARN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class ShardedCounterServiceImpl { /** * Helper method to determine if a counter ' s incrementAmount can be mutated ( incremented or decremented ) . In order
* for that to happen , the counter ' s status must be { @ link CounterStatus # AVAILABLE } .
* @ param counterName The name of the counter .
* @ param counterStatus The { @ link CounterStatus } of a counter that is currently stored in the Datastore .
* @ return */
@ VisibleForTesting protected void assertCounterDetailsMutatable ( final String counterName , final CounterStatus counterStatus ) { } } | Preconditions . checkNotNull ( counterName ) ; Preconditions . checkNotNull ( counterStatus ) ; if ( counterStatus != CounterStatus . AVAILABLE && counterStatus != CounterStatus . READ_ONLY_COUNT ) { final String msg = String . format ( "Can't mutate with status %s. Counter must be in in the %s or %s state!" , counterStatus , CounterStatus . AVAILABLE , CounterStatus . READ_ONLY_COUNT ) ; throw new CounterNotMutableException ( counterName , msg ) ; } |
public class GeoPackageProperties { /** * Initialize the configuration properties
* @ return properties */
private static Properties initializeConfigurationProperties ( ) { } } | Properties properties = new Properties ( ) ; InputStream in = GeoPackageProperties . class . getResourceAsStream ( "/" + PropertyConstants . PROPERTIES_FILE ) ; if ( in != null ) { try { properties . load ( in ) ; } catch ( Exception e ) { log . log ( Level . SEVERE , "Failed to load properties file: " + PropertyConstants . PROPERTIES_FILE , e ) ; } finally { try { in . close ( ) ; } catch ( IOException e ) { log . log ( Level . WARNING , "Failed to close properties file: " + PropertyConstants . PROPERTIES_FILE , e ) ; } } } else { log . log ( Level . SEVERE , "Failed to load properties, file not found: " + PropertyConstants . PROPERTIES_FILE ) ; } return properties ; |
public class WebLocatorAbstractBuilder { /** * < p > < b > Used for finding element process ( to generate xpath address ) < / b > < / p >
* < p > Use it when element must have all specified css classes ( order is not important ) . < / p >
* < ul >
* < li > Provided classes must be conform css rules . < / li >
* < / ul >
* @ param classes list of classes
* @ param < T > the element which calls this method
* @ return this element */
@ SuppressWarnings ( "unchecked" ) public < T extends WebLocatorAbstractBuilder > T setClasses ( final String ... classes ) { } } | pathBuilder . setClasses ( classes ) ; return ( T ) this ; |
public class SingleDbJDBCConnection { /** * { @ inheritDoc } */
@ Override protected int deleteValueData ( String cid ) throws SQLException , InvalidItemStateException , RepositoryException { } } | if ( deleteValue == null ) { deleteValue = dbConnection . prepareStatement ( DELETE_VALUE ) ; } else { deleteValue . clearParameters ( ) ; } deleteValue . setString ( 1 , cid ) ; return executeUpdate ( deleteValue , TYPE_DELETE_VALUE ) ; |
public class Manager { /** * Sends a { @ link Crouton } within a delayed { @ link Message } .
* @ param crouton
* The { @ link Crouton } that should be sent .
* @ param messageId
* The { @ link Message } id .
* @ param delay
* The delay in milliseconds . */
private void sendMessageDelayed ( Crouton crouton , final int messageId , final long delay ) { } } | Message message = obtainMessage ( messageId ) ; message . obj = crouton ; sendMessageDelayed ( message , delay ) ; |
public class KernelUtils { /** * This
* @ param stateDir */
public static void cleanDirectory ( File dir , String dirType ) { } } | boolean cleaned = true ; if ( dir . exists ( ) && dir . isDirectory ( ) ) cleaned = FileUtils . recursiveClean ( dir ) ; if ( ! cleaned ) throw new IllegalStateException ( "The " + dirType + " could not be cleaned. " + dirType + "=" + dir ) ; // re - create empty directory if it doesn ' t exist
boolean created = dir . mkdirs ( ) ; if ( ! dir . exists ( ) && ! created ) throw new IllegalStateException ( "The " + dirType + " could not be created. " + dirType + "=" + dir ) ; |
public class ReflectionMethods {
	/**
	 * Finds the public, protected, default or private method of the object with the provided name
	 * and parameters, searching the whole class hierarchy.
	 *
	 * @param obj the target object whose class hierarchy is searched
	 * @param methodName the method name to look for
	 * @param params concrete argument values (each must be non-null) whose runtime classes must be
	 *               assignable to the candidate method's declared parameter types
	 * @return the first matching {@link Method}
	 * @throws RuntimeException wrapping a {@link NoSuchMethodException} when no method matches
	 */
	public static Method findMethod(Object obj, String methodName, Object... params) {
		final Class<?>[] argTypes = new Class<?>[params.length];
		for (int i = 0; i < params.length; i++) {
			argTypes[i] = params[i].getClass();
		}
		// Walk the class hierarchy so private/protected/package-private methods are found too.
		for (Class<?> klass = obj.getClass(); klass != null; klass = klass.getSuperclass()) {
			for (Method method : klass.getDeclaredMethods()) {
				// Check the method name along with the number of parameters first.
				if (!method.getName().equals(methodName) || method.getParameterCount() != argTypes.length) {
					continue;
				}
				// Then check one by one that every provided argument is assignable
				// to the declared parameter type.
				final Class<?>[] paramClasses = method.getParameterTypes();
				boolean parametersMatch = true;
				for (int i = 0; i < argTypes.length; i++) {
					if (!paramClasses[i].isAssignableFrom(argTypes[i])) {
						parametersMatch = false;
						break;
					}
				}
				if (parametersMatch) {
					return method;
				}
			}
		}
		// FIX: the original threw its own NoSuchMethodException just to catch and rewrap it,
		// and the exception carried no detail. Fail directly with a descriptive cause.
		throw new RuntimeException(new NoSuchMethodException(methodName + " with " + argTypes.length + " parameter(s) on " + obj.getClass().getName()));
	}
}
public class HeaderViewRecyclerAdapter { /** * Replaces the underlying adapter , notifying RecyclerView of changes
* @ param adapter The new adapter to wrap */
public void setAdapter ( RecyclerView . Adapter adapter ) { } } | if ( mWrappedAdapter != null && mWrappedAdapter . getItemCount ( ) > 0 ) { notifyItemRangeRemoved ( getHeaderCount ( ) , mWrappedAdapter . getItemCount ( ) ) ; } setWrappedAdapter ( adapter ) ; notifyItemRangeInserted ( getHeaderCount ( ) , mWrappedAdapter . getItemCount ( ) ) ; |
public class SharedElementTransitionChangeHandler { /** * The transition will be delayed until the view with the name passed in is available in the " to " hierarchy . This is
* particularly useful for views that don ' t load instantly , like RecyclerViews . Note that using this method can
* potentially lock up your app indefinitely if the view never loads ! */
protected final void waitOnSharedElementNamed ( @ NonNull String name ) { } } | if ( ! sharedElementNames . values ( ) . contains ( name ) ) { throw new IllegalStateException ( "Can't wait on a shared element that hasn't been registered using addSharedElement" ) ; } waitForTransitionNames . add ( name ) ; |
public class ImgUtil {
	/**
	 * Scales an image to the given width and height.<br>
	 * The scaled output defaults to JPEG format; this method does not close the streams.
	 *
	 * @param srcStream source image stream
	 * @param destStream target stream for the scaled image
	 * @param width the scaled width
	 * @param height the scaled height
	 * @param fixedColor padding color used when the aspect ratio differs; {@code null} for no padding
	 * @throws IORuntimeException on I/O error
	 */
	public static void scale(InputStream srcStream, OutputStream destStream, int width, int height, Color fixedColor) throws IORuntimeException {
		scale(read(srcStream), getImageOutputStream(destStream), width, height, fixedColor);
	}
}
public class ParseUtils {
	/**
	 * Get an exception reporting that an element of a given type and name has
	 * already been declared in this scope.
	 *
	 * @param reader the stream reader (supplies the current location)
	 * @param name the name that was redeclared
	 * @return the exception
	 */
	public static XMLStreamException duplicateNamedElement(final XMLStreamReader reader, final String name) {
		final String message = "An element of this type named '" + name + "' has already been declared";
		return new XMLStreamException(message, reader.getLocation());
	}
}
public class BroxWarpingSpacial {
	/**
	 * Inner SOR iteration step.
	 *
	 * <p>Performs one successive-over-relaxation update of the flow increment (du, dv) at pixel i
	 * and returns the squared magnitude of the change, which callers can use as a convergence
	 * measure. NOTE(review): statement order matters — du[i] is updated first and the new value is
	 * used when updating dv[i] (Gauss-Seidel style).
	 *
	 * @param i Index of target pixel at (x, y)
	 * @param ipx (x+1, y)
	 * @param imx (x-1, y)
	 * @param ipy (x, y+1)
	 * @param imy (x, y-1)
	 * @return squared change in (du, dv) at pixel i
	 */
	private float iterationSor(GrayF32 image1, GrayF32 deriv1X, GrayF32 deriv1Y, int i, int ipx, int imx, int ipy, int imy) {
		float w = SOR_RELAXATION;
		// these variables could be precomputed once. See equation 11
		float psid = psiData.data[i];
		float psig = gamma * psiGradient.data[i];
		// di: warped-image intensity difference; dx2/dy2 etc.: warped second-image derivatives.
		float di = warpImage2.data[i] - image1.data[i];
		float dx2 = warpDeriv2X.data[i];
		float dy2 = warpDeriv2Y.data[i];
		float dxx2 = warpDeriv2XX.data[i];
		float dyy2 = warpDeriv2YY.data[i];
		float dxy2 = warpDeriv2XY.data[i];
		// Au/Av: right-hand sides; Du/Dv: diagonal terms; D: u-v coupling term.
		float Au = -psid * di * dx2 + alpha * divU.data[i] - psig * ((dx2 - deriv1X.data[i]) * warpDeriv2XX.data[i] + (dy2 - deriv1Y.data[i]) * warpDeriv2XY.data[i]);
		float Av = -psid * di * dy2 + alpha * divV.data[i] - psig * ((dx2 - deriv1X.data[i]) * warpDeriv2XY.data[i] + (dy2 - deriv1Y.data[i]) * warpDeriv2YY.data[i]);
		float Du = psid * dx2 * dx2 + psig * (dxx2 * dxx2 + dxy2 * dxy2) + alpha * divD.data[i];
		float Dv = psid * dy2 * dy2 + psig * (dyy2 * dyy2 + dxy2 * dxy2) + alpha * divD.data[i];
		float D = psid * dx2 * dy2 + psig * (dxx2 + dyy2) * dxy2;
		// update the change in flow
		// Smoothness coefficients averaged with each 4-neighbor (half-point discretization).
		float psi_index = psiSmooth.data[i];
		float coef0 = 0.5f * (psiSmooth.data[ipx] + psi_index);
		float coef1 = 0.5f * (psiSmooth.data[imx] + psi_index);
		float coef2 = 0.5f * (psiSmooth.data[ipy] + psi_index);
		float coef3 = 0.5f * (psiSmooth.data[imy] + psi_index);
		float div_du = coef0 * du.data[ipx] + coef1 * du.data[imx] + coef2 * du.data[ipy] + coef3 * du.data[imy];
		float div_dv = coef0 * dv.data[ipx] + coef1 * dv.data[imx] + coef2 * dv.data[ipy] + coef3 * dv.data[imy];
		final float dui = du.data[i];
		final float dvi = dv.data[i];
		// SOR relaxation: blend the old value with the newly solved one using weight w.
		du.data[i] = (1f - w) * dui + w * (Au - D * dvi + alpha * div_du) / Du;
		dv.data[i] = (1f - w) * dvi + w * (Av - D * du.data[i] + alpha * div_dv) / Dv;
		return (du.data[i] - dui) * (du.data[i] - dui) + (dv.data[i] - dvi) * (dv.data[i] - dvi);
	}
}
// NOTE(review): ANTLR-generated parser rule from InternalSARL.g — do not hand-edit; regenerate
// from the grammar instead. Reproduced verbatim below; only this header comment was added.
public class InternalSARLParser { /** * InternalSARL . g : 11518:1 : ruleXAnnotation returns [ EObject current = null ] : ( ( ) otherlv _ 1 = ' @ ' ( ( ruleQualifiedName ) ) ( ( ( ' ( ' ) = > otherlv _ 3 = ' ( ' ) ( ( ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 4_0 = ruleXAnnotationElementValuePair ) ) ( otherlv _ 5 = ' , ' ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 6_0 = ruleXAnnotationElementValuePair ) ) ) * ) | ( ( lv _ value _ 7_0 = ruleXAnnotationElementValueOrCommaList ) ) ) ? otherlv _ 8 = ' ) ' ) ? ) ; */
public final EObject ruleXAnnotation ( ) throws RecognitionException { } } | EObject current = null ; Token otherlv_1 = null ; Token otherlv_3 = null ; Token otherlv_5 = null ; Token otherlv_8 = null ; EObject lv_elementValuePairs_4_0 = null ; EObject lv_elementValuePairs_6_0 = null ; EObject lv_value_7_0 = null ; enterRule ( ) ; try { // InternalSARL . g : 11524:2 : ( ( ( ) otherlv _ 1 = ' @ ' ( ( ruleQualifiedName ) ) ( ( ( ' ( ' ) = > otherlv _ 3 = ' ( ' ) ( ( ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 4_0 = ruleXAnnotationElementValuePair ) ) ( otherlv _ 5 = ' , ' ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 6_0 = ruleXAnnotationElementValuePair ) ) ) * ) | ( ( lv _ value _ 7_0 = ruleXAnnotationElementValueOrCommaList ) ) ) ? otherlv _ 8 = ' ) ' ) ? ) )
// InternalSARL . g : 11525:2 : ( ( ) otherlv _ 1 = ' @ ' ( ( ruleQualifiedName ) ) ( ( ( ' ( ' ) = > otherlv _ 3 = ' ( ' ) ( ( ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 4_0 = ruleXAnnotationElementValuePair ) ) ( otherlv _ 5 = ' , ' ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 6_0 = ruleXAnnotationElementValuePair ) ) ) * ) | ( ( lv _ value _ 7_0 = ruleXAnnotationElementValueOrCommaList ) ) ) ? otherlv _ 8 = ' ) ' ) ? )
{ // InternalSARL . g : 11525:2 : ( ( ) otherlv _ 1 = ' @ ' ( ( ruleQualifiedName ) ) ( ( ( ' ( ' ) = > otherlv _ 3 = ' ( ' ) ( ( ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 4_0 = ruleXAnnotationElementValuePair ) ) ( otherlv _ 5 = ' , ' ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 6_0 = ruleXAnnotationElementValuePair ) ) ) * ) | ( ( lv _ value _ 7_0 = ruleXAnnotationElementValueOrCommaList ) ) ) ? otherlv _ 8 = ' ) ' ) ? )
// InternalSARL . g : 11526:3 : ( ) otherlv _ 1 = ' @ ' ( ( ruleQualifiedName ) ) ( ( ( ' ( ' ) = > otherlv _ 3 = ' ( ' ) ( ( ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 4_0 = ruleXAnnotationElementValuePair ) ) ( otherlv _ 5 = ' , ' ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 6_0 = ruleXAnnotationElementValuePair ) ) ) * ) | ( ( lv _ value _ 7_0 = ruleXAnnotationElementValueOrCommaList ) ) ) ? otherlv _ 8 = ' ) ' ) ?
{ // InternalSARL . g : 11526:3 : ( )
// InternalSARL . g : 11527:4:
{ if ( state . backtracking == 0 ) { current = forceCreateModelElement ( grammarAccess . getXAnnotationAccess ( ) . getXAnnotationAction_0 ( ) , current ) ; } } otherlv_1 = ( Token ) match ( input , 105 , FOLLOW_3 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_1 , grammarAccess . getXAnnotationAccess ( ) . getCommercialAtKeyword_1 ( ) ) ; } // InternalSARL . g : 11537:3 : ( ( ruleQualifiedName ) )
// InternalSARL . g : 11538:4 : ( ruleQualifiedName )
{ // InternalSARL . g : 11538:4 : ( ruleQualifiedName )
// InternalSARL . g : 11539:5 : ruleQualifiedName
{ if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElement ( grammarAccess . getXAnnotationRule ( ) ) ; } } if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXAnnotationAccess ( ) . getAnnotationTypeJvmAnnotationTypeCrossReference_2_0 ( ) ) ; } pushFollow ( FOLLOW_108 ) ; ruleQualifiedName ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { afterParserOrEnumRuleCall ( ) ; } } } // InternalSARL . g : 11553:3 : ( ( ( ' ( ' ) = > otherlv _ 3 = ' ( ' ) ( ( ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 4_0 = ruleXAnnotationElementValuePair ) ) ( otherlv _ 5 = ' , ' ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 6_0 = ruleXAnnotationElementValuePair ) ) ) * ) | ( ( lv _ value _ 7_0 = ruleXAnnotationElementValueOrCommaList ) ) ) ? otherlv _ 8 = ' ) ' ) ?
int alt288 = 2 ; int LA288_0 = input . LA ( 1 ) ; if ( ( LA288_0 == 49 ) && ( synpred24_InternalSARL ( ) ) ) { alt288 = 1 ; } switch ( alt288 ) { case 1 : // InternalSARL . g : 11554:4 : ( ( ' ( ' ) = > otherlv _ 3 = ' ( ' ) ( ( ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 4_0 = ruleXAnnotationElementValuePair ) ) ( otherlv _ 5 = ' , ' ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 6_0 = ruleXAnnotationElementValuePair ) ) ) * ) | ( ( lv _ value _ 7_0 = ruleXAnnotationElementValueOrCommaList ) ) ) ? otherlv _ 8 = ' ) '
{ // InternalSARL . g : 11554:4 : ( ( ' ( ' ) = > otherlv _ 3 = ' ( ' )
// InternalSARL . g : 11555:5 : ( ' ( ' ) = > otherlv _ 3 = ' ( '
{ otherlv_3 = ( Token ) match ( input , 49 , FOLLOW_109 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_3 , grammarAccess . getXAnnotationAccess ( ) . getLeftParenthesisKeyword_3_0 ( ) ) ; } } // InternalSARL . g : 11561:4 : ( ( ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 4_0 = ruleXAnnotationElementValuePair ) ) ( otherlv _ 5 = ' , ' ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 6_0 = ruleXAnnotationElementValuePair ) ) ) * ) | ( ( lv _ value _ 7_0 = ruleXAnnotationElementValueOrCommaList ) ) ) ?
int alt287 = 3 ; alt287 = dfa287 . predict ( input ) ; switch ( alt287 ) { case 1 : // InternalSARL . g : 11562:5 : ( ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 4_0 = ruleXAnnotationElementValuePair ) ) ( otherlv _ 5 = ' , ' ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 6_0 = ruleXAnnotationElementValuePair ) ) ) * )
{ // InternalSARL . g : 11562:5 : ( ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 4_0 = ruleXAnnotationElementValuePair ) ) ( otherlv _ 5 = ' , ' ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 6_0 = ruleXAnnotationElementValuePair ) ) ) * )
// InternalSARL . g : 11563:6 : ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 4_0 = ruleXAnnotationElementValuePair ) ) ( otherlv _ 5 = ' , ' ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 6_0 = ruleXAnnotationElementValuePair ) ) ) *
{ // InternalSARL . g : 11563:6 : ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 4_0 = ruleXAnnotationElementValuePair ) )
// InternalSARL . g : 11564:7 : ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 4_0 = ruleXAnnotationElementValuePair )
{ // InternalSARL . g : 11573:7 : ( lv _ elementValuePairs _ 4_0 = ruleXAnnotationElementValuePair )
// InternalSARL . g : 11574:8 : lv _ elementValuePairs _ 4_0 = ruleXAnnotationElementValuePair
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXAnnotationAccess ( ) . getElementValuePairsXAnnotationElementValuePairParserRuleCall_3_1_0_0_0 ( ) ) ; } pushFollow ( FOLLOW_51 ) ; lv_elementValuePairs_4_0 = ruleXAnnotationElementValuePair ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXAnnotationRule ( ) ) ; } add ( current , "elementValuePairs" , lv_elementValuePairs_4_0 , "org.eclipse.xtext.xbase.annotations.XbaseWithAnnotations.XAnnotationElementValuePair" ) ; afterParserOrEnumRuleCall ( ) ; } } } // InternalSARL . g : 11591:6 : ( otherlv _ 5 = ' , ' ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 6_0 = ruleXAnnotationElementValuePair ) ) ) *
loop286 : do { int alt286 = 2 ; int LA286_0 = input . LA ( 1 ) ; if ( ( LA286_0 == 32 ) ) { alt286 = 1 ; } switch ( alt286 ) { case 1 : // InternalSARL . g : 11592:7 : otherlv _ 5 = ' , ' ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 6_0 = ruleXAnnotationElementValuePair ) )
{ otherlv_5 = ( Token ) match ( input , 32 , FOLLOW_3 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_5 , grammarAccess . getXAnnotationAccess ( ) . getCommaKeyword_3_1_0_1_0 ( ) ) ; } // InternalSARL . g : 11596:7 : ( ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 6_0 = ruleXAnnotationElementValuePair ) )
// InternalSARL . g : 11597:8 : ( ( ( ( ruleValidID ) ) ' = ' ) ) = > ( lv _ elementValuePairs _ 6_0 = ruleXAnnotationElementValuePair )
{ // InternalSARL . g : 11606:8 : ( lv _ elementValuePairs _ 6_0 = ruleXAnnotationElementValuePair )
// InternalSARL . g : 11607:9 : lv _ elementValuePairs _ 6_0 = ruleXAnnotationElementValuePair
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXAnnotationAccess ( ) . getElementValuePairsXAnnotationElementValuePairParserRuleCall_3_1_0_1_1_0 ( ) ) ; } pushFollow ( FOLLOW_51 ) ; lv_elementValuePairs_6_0 = ruleXAnnotationElementValuePair ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXAnnotationRule ( ) ) ; } add ( current , "elementValuePairs" , lv_elementValuePairs_6_0 , "org.eclipse.xtext.xbase.annotations.XbaseWithAnnotations.XAnnotationElementValuePair" ) ; afterParserOrEnumRuleCall ( ) ; } } } } break ; default : break loop286 ; } } while ( true ) ; } } break ; case 2 : // InternalSARL . g : 11627:5 : ( ( lv _ value _ 7_0 = ruleXAnnotationElementValueOrCommaList ) )
{ // InternalSARL . g : 11627:5 : ( ( lv _ value _ 7_0 = ruleXAnnotationElementValueOrCommaList ) )
// InternalSARL . g : 11628:6 : ( lv _ value _ 7_0 = ruleXAnnotationElementValueOrCommaList )
{ // InternalSARL . g : 11628:6 : ( lv _ value _ 7_0 = ruleXAnnotationElementValueOrCommaList )
// InternalSARL . g : 11629:7 : lv _ value _ 7_0 = ruleXAnnotationElementValueOrCommaList
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXAnnotationAccess ( ) . getValueXAnnotationElementValueOrCommaListParserRuleCall_3_1_1_0 ( ) ) ; } pushFollow ( FOLLOW_81 ) ; lv_value_7_0 = ruleXAnnotationElementValueOrCommaList ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXAnnotationRule ( ) ) ; } set ( current , "value" , lv_value_7_0 , "org.eclipse.xtext.xbase.annotations.XbaseWithAnnotations.XAnnotationElementValueOrCommaList" ) ; afterParserOrEnumRuleCall ( ) ; } } } } break ; } otherlv_8 = ( Token ) match ( input , 50 , FOLLOW_2 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_8 , grammarAccess . getXAnnotationAccess ( ) . getRightParenthesisKeyword_3_2 ( ) ) ; } } break ; } } } if ( state . backtracking == 0 ) { leaveRule ( ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
public class Expiration {
	/**
	 * Return expiry for a given cache operation. It returns null when the
	 * expiry time cannot be determined, in which case clients should not update
	 * expiry settings for the cached entry.
	 */
	public static Duration getExpiry(ExpiryPolicy policy, Operation op) {
		// No policy configured: fall back to the default duration.
		if (policy == null) {
			return getDefaultDuration();
		}
		switch (op) {
			case CREATION:
				try {
					return policy.getExpiryForCreation();
				} catch (Throwable t) {
					log.getExpiryHasThrown(t);
					// NOTE(review): creation deliberately differs from access/update — a newly
					// created entry has no existing expiry to leave untouched, so the default
					// duration is used instead of null.
					return getDefaultDuration();
				}
			case ACCESS:
				try {
					return policy.getExpiryForAccess();
				} catch (Throwable t) {
					log.getExpiryHasThrown(t);
					// If an exception is thrown, leave expiration untouched
					return null;
				}
			case UPDATE:
				try {
					return policy.getExpiryForUpdate();
				} catch (Throwable t) {
					log.getExpiryHasThrown(t);
					// If an exception is thrown, leave expiration untouched
					return null;
				}
			default:
				// Unknown operations are programming errors and are not swallowed.
				throw log.unknownExpiryOperation(op.toString());
		}
	}
}
public class DocumentVersionInfo { /** * Returns the version document of the given document , if any ; returns null otherwise .
* @ param document the document to get the version from .
* @ return the version of the given document , if any ; returns null otherwise . */
static BsonDocument getDocumentVersionDoc ( final BsonDocument document ) { } } | if ( document == null || ! document . containsKey ( DOCUMENT_VERSION_FIELD ) ) { return null ; } return document . getDocument ( DOCUMENT_VERSION_FIELD , null ) ; |
public class ElementLayout { /** * Checks for a circular reference to the same linked layout , throwing an exception if found . */
private void checkForCircularReference ( ) { } } | ElementLayout layout = this ; while ( ( layout = layout . getAncestor ( ElementLayout . class ) ) != null ) { if ( layout . linked && layout . shared == shared && layout . layoutName . equals ( layoutName ) ) { CWFException . raise ( "Circular reference to layout " + layoutName ) ; } } |
public class HttpsFileUploader { /** * Uploads a file . This is a convenience method of the more general
* { @ link # upload ( com . addicticks . net . httpsupload . HttpsFileUploaderConfig , java . util . List , java . util . Map , com . addicticks . net . httpsupload . UploadProgress ) upload ( . . . ) } method .
* This method only uploads a single file and expects the destination field for
* the file on the server to be named { @ code " file " } . The MIME type of the
* file will be guessed from < code > uploadFile < / code > argument using
* { @ link java . net . URLConnection # guessContentTypeFromName ( java . lang . String ) URLConnection # guessContentTypeFromName ( ) } .
* If this fails , { @ link # DEFAULT _ MIME _ TYPE } will be used .
* < p > After the method returns the result should be examined for errors .
* @ see # upload ( com . addicticks . net . httpsupload . HttpsFileUploaderConfig , java . util . List , java . util . Map , com . addicticks . net . httpsupload . UploadProgress )
* @ param config configuration for the connection .
* @ param uploadFile file to upload
* @ return result of the upload operation
* @ throws IOException if the endpoint cannot be reached or if input file cannot be
* read . */
public static HttpsFileUploaderResult upload ( HttpsFileUploaderConfig config , File uploadFile ) throws IOException { } } | return upload ( config , Collections . singletonList ( new UploadItemFile ( uploadFile ) ) , null , null ) ; |
public class SoundStore { /** * Get a MOD sound ( mod / xm etc )
* @ param ref The stream to the MOD to load
* @ param in The stream to the MOD to load
* @ return The sound for play back
* @ throws IOException Indicates a failure to read the data */
public Audio getMOD ( String ref , InputStream in ) throws IOException { } } | if ( ! soundWorks ) { return new NullAudio ( ) ; } if ( ! inited ) { throw new RuntimeException ( "Can't load sounds until SoundStore is init(). Use the container init() method." ) ; } if ( deferred ) { return new DeferredSound ( ref , in , DeferredSound . MOD ) ; } return new MODSound ( this , in ) ; |
public class BaseFacebookClient { /** * Appends the given { @ code parameter } to the given { @ code parameters } array .
* @ param parameter
* The parameter value to append .
* @ param parameters
* The parameters to which the given { @ code parameter } is appended .
* @ return A new array which contains both { @ code parameter } and { @ code parameters } . */
protected Parameter [ ] parametersWithAdditionalParameter ( Parameter parameter , Parameter ... parameters ) { } } | Parameter [ ] updatedParameters = new Parameter [ parameters . length + 1 ] ; System . arraycopy ( parameters , 0 , updatedParameters , 0 , parameters . length ) ; updatedParameters [ parameters . length ] = parameter ; return updatedParameters ; |
public class RoadNetworkConstants {
    /**
     * Sets the preferred distance below which roads may be connected.
     * A non-positive distance clears the stored preference instead.
     *
     * @param distance the preferred distance (in meters) below which roads may
     *                 be connected; values {@code <= 0} remove the preference
     */
    public static void setPreferredRoadConnectionDistance(double distance) {
        final Preferences prefs = Preferences.userNodeForPackage(RoadNetworkConstants.class);
        if (prefs == null) {
            return;
        }
        if (distance > 0.) {
            prefs.putDouble("ROAD_CONNECTION_DISTANCE", distance); //$NON-NLS-1$
        } else {
            // Non-positive distance means "revert to the default".
            prefs.remove("ROAD_CONNECTION_DISTANCE"); //$NON-NLS-1$
        }
    }
}
public class Stream { /** * If specified map is null , returns an empty { @ code Stream } ,
* otherwise returns a { @ code Stream } containing entries of this map .
* @ param < K > the type of map keys
* @ param < V > the type of map values
* @ param map the map with elements to be passed to stream
* @ return the new stream
* @ since 1.1.9 */
@ NotNull public static < K , V > Stream < Map . Entry < K , V > > ofNullable ( @ Nullable Map < K , V > map ) { } } | return ( map == null ) ? Stream . < Map . Entry < K , V > > empty ( ) : Stream . of ( map ) ; |
public class MultiLayerNetwork { /** * See { @ link # rnnTimeStep ( INDArray ) } for details < br >
* If no memory workspace is provided , the output will be detached ( not in any workspace ) . < br >
* If a memory workspace is provided , the output activation array ( i . e . , the INDArray returned by this method )
* will be placed in the specified workspace . This workspace must be opened by the user before calling this method -
* and the user is responsible for ( a ) closing this workspace , and ( b ) ensuring the output array is not used out
* of scope ( i . e . , not used after closing the workspace to which it belongs - as this is likely to cause either
* an exception when used , or a crash ) .
* @ param input Input activations
* @ param outputWorkspace Output workspace . May be null
* @ return The output / activations from the network ( either detached or in the specified workspace if provided ) */
public INDArray rnnTimeStep ( INDArray input , MemoryWorkspace outputWorkspace ) { } } | try { boolean inputIs2d = input . rank ( ) == 2 ; INDArray out = outputOfLayerDetached ( false , FwdPassType . RNN_TIMESTEP , layers . length - 1 , input , null , null , outputWorkspace ) ; if ( inputIs2d && out . rank ( ) == 3 && layers [ layers . length - 1 ] . type ( ) == Type . RECURRENT ) { // Return 2d output with shape [ miniBatchSize , nOut ]
// instead of 3d output with shape [ miniBatchSize , nOut , 1]
return out . tensorAlongDimension ( 0 , 1 , 0 ) ; } return out ; } catch ( OutOfMemoryError e ) { CrashReportingUtil . writeMemoryCrashDump ( this , e ) ; throw e ; } |
public class FastQueue { /** * Returns a new element of data . If there are new data elements available then array will
* automatically grow .
* @ return A new instance . */
public T grow ( ) { } } | if ( size < data . length ) { return data [ size ++ ] ; } else { growArray ( ( data . length + 1 ) * 2 ) ; return data [ size ++ ] ; } |
public class Fibers { /** * Creates a new Fiber subclassing the Fiber class and overriding the { @ link Fiber # run ( ) run } method .
* The new fiber has no name , and uses the default initial stack size .
* @ param scheduler The scheduler pool in which the fiber should run .
* @ throws NullPointerException when proto is null
* @ throws IllegalArgumentException when stackSize is & lt ; = 0 */
public static < V > Fiber < V > start ( FiberScheduler scheduler ) { } } | return Fibers . < V > create ( scheduler ) . start ( ) ; |
public class AsynchDeletionThread { /** * Method isStopping - Determine if the AsynchDeletionThread is stopping or not .
* @ return boolean */
public boolean isStopping ( ) { } } | if ( tc . isEntryEnabled ( ) ) { SibTr . entry ( tc , "isStopping" ) ; SibTr . exit ( tc , "isStopping" , new Boolean ( _isStopping ) ) ; } return _isStopping ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.