signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class ReplicationRule {
    /**
     * Sets the {@link ReplicationFilter} that is used to identify objects that a CRR
     * rule applies to. A rule cannot have both a {@link ReplicationFilter} and the
     * deprecated {@link #prefix}.
     *
     * @param filter the {@link ReplicationFilter} to apply; must not be null
     * @throws IllegalArgumentException if {@code filter} is null, or if a prefix has
     *         already been configured on this rule
     */
    public void setFilter(ReplicationFilter filter) {
        // Validation order matters: a null filter is rejected before the
        // prefix/filter mutual-exclusion check.
        if (filter == null) {
            throw new IllegalArgumentException("Filter cannot be null for a replication rule");
        }
        if (prefix != null) {
            throw new IllegalArgumentException("You cannot use both prefix and filter at the same time in a replication rule");
        }
        this.filter = filter;
    }
}
public class OptionsDoclet { /** * Given a command - line option of this doclet , returns the number of arguments you must specify on
* the command line for the given option . Returns 0 if the argument is not recognized . This method
* is automatically invoked by Javadoc .
* @ param option the command - line option
* @ return the number of command - line arguments needed when using the option
* @ see < a
* href = " https : / / docs . oracle . com / javase / 8 / docs / technotes / guides / javadoc / doclet / overview . html " > Doclet
* overview < / a > */
public static int optionLength ( String option ) { } } | if ( option . equals ( "-help" ) ) { System . out . printf ( USAGE ) ; return 1 ; } if ( option . equals ( "-i" ) || option . equals ( "-classdoc" ) || option . equals ( "-singledash" ) ) { return 1 ; } if ( option . equals ( "-docfile" ) || option . equals ( "-outfile" ) || option . equals ( "-format" ) || option . equals ( "-d" ) ) { return 2 ; } return 0 ; |
public class Interceptors {
    /**
     * Registers a global action interceptor. The same as addGlobalActionInterceptor;
     * it is kept to be compatible with earlier versions of jfinal.
     *
     * @param globalActionInterceptor the interceptor to register; must not be null
     * @return this, to allow call chaining
     * @throws IllegalArgumentException if the interceptor is null
     */
    public Interceptors add(Interceptor globalActionInterceptor) {
        if (globalActionInterceptor == null) {
            throw new IllegalArgumentException("globalActionInterceptor can not be null.");
        }
        // Registration is delegated to the singleton InterceptorManager.
        InterceptorManager.me().addGlobalActionInterceptor(globalActionInterceptor);
        return this;
    }
}
public class AbstractMessage {
    /**
     * Returns the named message property converted to a String.
     *
     * @see javax.jms.Message#getStringProperty(java.lang.String)
     */
    @Override
    public final String getStringProperty(String name) throws JMSException {
        // Delegates the coercion to the shared helper. getProperty(name) may return
        // null; the resulting value then depends on MessageConvertTools.asString
        // (presumably null) - not visible here.
        return MessageConvertTools.asString(getProperty(name));
    }
}
public class OpenPgpPubSubUtil {
    /**
     * Consult the public key metadata node and fetch a list of all of our published
     * OpenPGP public keys.
     *
     * @see <a href="https://xmpp.org/extensions/xep-0373.html#discover-pubkey-list">
     *      XEP-0373 §4.3: Discovering Public Keys of a User</a>
     * @param connection XMPP connection
     * @return content of our metadata node
     * @throws InterruptedException if the thread gets interrupted
     * @throws XMPPException.XMPPErrorException in case of an XMPP protocol exception
     * @throws PubSubException.NotAPubSubNodeException in case the queried entity is not a PubSub node
     * @throws PubSubException.NotALeafNodeException in case the queried node is not a {@link LeafNode}
     * @throws SmackException.NotConnectedException in case we are not connected
     * @throws SmackException.NoResponseException in case the server doesn't respond
     */
    public static PublicKeysListElement fetchPubkeysList(XMPPConnection connection) throws InterruptedException, XMPPException.XMPPErrorException, PubSubException.NotAPubSubNodeException, PubSubException.NotALeafNodeException, SmackException.NotConnectedException, SmackException.NoResponseException {
        // Delegates to the two-argument overload; null presumably means
        // "our own jid / account owner" - confirm against the overload's contract.
        return fetchPubkeysList(connection, null);
    }
}
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;

public class Main {
    /**
     * Return the unique elements in a list, sorted in ascending order.
     *
     * <pre>
     * distinctSortedElements([5, 3, 5, 2, 3, 3, 9, 0, 123])
     * == [0, 2, 3, 5, 9, 123]
     * </pre>
     *
     * @param array the input values (may contain duplicates); must not be null
     * @return a new list of the distinct values, ascending
     */
    public static List<Integer> distinctSortedElements(List<Integer> array) {
        // A HashSet drops duplicates; sorting the copy yields ascending order.
        List<Integer> distinctSortedArray = new ArrayList<Integer>(new HashSet<Integer>(array));
        Collections.sort(distinctSortedArray);
        return distinctSortedArray;
    }

    public static void main(String[] args) {
        // Fix: the original file used Arrays.asList without importing java.util.Arrays,
        // which does not compile.
        ArrayList<Integer> array = new ArrayList<Integer>(Arrays.asList(5, 3, 5, 2, 3, 3, 9, 0, 123));
        System.out.println(distinctSortedElements(array));
    }
}
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link ConstantType}{@code >}
     * for the MathML "rationals" element. (JAXB-generated factory method.)
     */
    @XmlElementDecl(namespace = "http://www.w3.org/1998/Math/MathML", name = "rationals")
    public JAXBElement<ConstantType> createRationals(ConstantType value) {
        // The null third argument is the scope: null marks a global element declaration.
        return new JAXBElement<ConstantType>(_Rationals_QNAME, ConstantType.class, null, value);
    }
}
public class PostalAddress {
    /**
     * <pre>
     * Optional. Additional, country-specific, sorting code. This is not used
     * in most regions. Where it is used, the value is either a string like
     * "CEDEX", optionally followed by a number (e.g. "CEDEX 7"), or just a number
     * alone, representing the "sector code" (Jamaica), "delivery area indicator"
     * (Malawi) or "post office indicator" (e.g. Côte d'Ivoire).
     * </pre>
     *
     * <code>string sorting_code = 5;</code>
     */
    public java.lang.String getSortingCode() {
        java.lang.Object ref = sortingCode_;
        if (ref instanceof java.lang.String) {
            // Already decoded on a previous call.
            return (java.lang.String) ref;
        } else {
            // Standard protobuf lazy decode: the field is still held as a
            // ByteString; decode it once and cache the String back into the field.
            com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
            java.lang.String s = bs.toStringUtf8();
            sortingCode_ = s;
            return s;
        }
    }
}
public class GqClientSessionImpl {
    /**
     * ClientAuthSession implementation method: dispatches an Abort-Session-Answer
     * through the session's event machinery.
     *
     * @param answer the Abort-Session-Answer to send
     */
    @Override
    public void sendAbortSessionAnswer(AbortSessionAnswer answer) throws InternalException, IllegalDiameterStateException, RouteException, OverloadException {
        send(Event.Type.SEND_SESSION_ABORT_ANSWER, answer);
    }
}
public class DatabaseDAODefaultImpl {
    /**
     * Joins the given array into a single newline-separated string.
     *
     * Replaces a hand-rolled StringBuilder loop with {@link String#join};
     * behavior is identical, including the empty-array case (returns "")
     * and null elements (rendered as "null").
     *
     * @param array the values to join; must not be null
     * @return the elements of {@code array} separated by '\n'
     */
    private String stringArray2String(String[] array) {
        return String.join("\n", array);
    }
}
public class GenericsUtils {
    /**
     * Order variables for consequent variables resolution (to support reverse-order
     * declaration cases). E.g. {@code T extends List<K>, K, P extends Collection<T>}
     * must be resolved as 2, 1, 3, and {@code T extends List<D>, D extends
     * Collection<P>, P} as 3, 2, 1 (otherwise resolution will fail due to an unknown
     * generic). An incomplete set of variables may be provided: the method orders
     * only the provided vars, treating all others as already known.
     *
     * @param variables variables to order
     * @return variables ordered for correct types resolution
     */
    public static List<TypeVariable> orderVariablesForResolution(final List<TypeVariable> variables) {
        // Work list of still-unplaced variables.
        final List<TypeVariable> vars = new ArrayList<TypeVariable>(variables);
        // Names of the variables we are responsible for ordering; any variable
        // name NOT in this list is assumed to be already resolved elsewhere.
        final List<String> countableNames = new ArrayList<String>();
        for (TypeVariable var : variables) {
            countableNames.add(var.getName());
        }
        final List<String> known = new ArrayList<String>();
        final List<TypeVariable> res = new ArrayList<TypeVariable>();
        // Fixpoint sweep: on each pass, emit every variable whose bounds reference
        // only known variables.
        // cycle will definitely end because java compiler does not allow to specify generic cycles
        while (!vars.isEmpty()) {
            final Iterator<TypeVariable> it = vars.iterator();
            while (it.hasNext()) {
                final TypeVariable var = it.next();
                boolean reject = false;
                for (Type bound : var.getBounds()) {
                    // can't be empty as otherwise variables would not be here
                    final List<TypeVariable> unknowns = GenericsUtils.findVariables(bound);
                    for (TypeVariable unknown : unknowns) {
                        // Reject if the bound references one of "our" variables
                        // that has not been placed yet.
                        if (countableNames.contains(unknown.getName()) && !known.contains(unknown.getName())) {
                            reject = true;
                            break;
                        }
                    }
                }
                if (!reject) {
                    res.add(var);
                    known.add(var.getName());
                    it.remove();
                }
            }
        }
        return res;
    }
}
public class Duration {
    /** {@inheritDoc} */
    @Override
    public int compareTo(Duration rhs) {
        // Normalise the right-hand side into this instance's units before comparing.
        // The constants are presumably minutes-per-day (8*60), minutes-per-week
        // (5*8*60) and days-per-month (20) - TODO confirm against convertUnits.
        if (m_units != rhs.m_units) {
            rhs = convertUnits(rhs.m_duration, rhs.m_units, m_units, (8 * 60), (5 * 8 * 60), 20);
        }
        // Component-equal durations compare as 0; otherwise order by raw value.
        return durationComponentEquals(rhs) ? 0 : m_duration < rhs.m_duration ? -1 : 1;
    }
}
public class ImageComponent { /** * Sets the image to be displayed .
* @ param img
* the image to be displayed */
public void setImage ( BufferedImage img ) { } } | this . img = img ; Dimension dim ; if ( img != null ) { dim = new Dimension ( img . getWidth ( ) , img . getHeight ( ) ) ; } else { dim = new Dimension ( DEFAULT_WIDTH , DEFAULT_HEIGHT ) ; } setSize ( dim ) ; setPreferredSize ( dim ) ; repaint ( ) ; |
public class Strands {
    /**
     * Disables the current strand for scheduling purposes, until the specified
     * deadline, unless the permit is available. If the permit is available then it
     * is consumed and the call returns immediately; otherwise the current strand
     * becomes disabled for scheduling purposes and lies dormant until one of four
     * things happens:
     * <ul>
     * <li>Some other strand invokes {@link #unpark unpark} with the current strand
     * as the target; or
     * <li>Some other strand interrupts the current strand; or
     * <li>The specified deadline passes; or
     * <li>The call spuriously (that is, for no reason) returns.
     * </ul>
     * This method does <em>not</em> report which of these caused the method to
     * return. Callers should re-check the conditions which caused the strand to
     * park in the first place.
     *
     * @param blocker the synchronization object responsible for this strand parking
     * @param deadline the absolute time, in milliseconds from the Epoch, to wait until
     */
    public static void parkUntil(Object blocker, long deadline) {
        try {
            Strand.parkUntil(blocker, deadline);
        } catch (SuspendExecution e) {
            // SuspendExecution is not expected to escape here; rewrap it in the
            // unchecked runtime carrier so this method needs no throws clause.
            throw RuntimeSuspendExecution.of(e);
        }
    }
}
public class Ui {
    /**
     * Modulates a color's alpha channel by the given alpha factor, leaving the
     * RGB channels untouched.
     *
     * @param color the ARGB color to modulate
     * @param alpha the modulation factor in [0, 255]; 255 leaves the alpha unchanged
     * @return the color with its alpha multiplied by {@code alpha / 255}
     */
    public static int modulateColorAlpha(int color, int alpha) {
        final int srcAlpha = color >>> 24;
        // Map alpha from [0, 255] onto a [0, 256] multiplier so that 255
        // becomes an exact identity (255 + (255 >> 7) == 256).
        final int multiplier = alpha + (alpha >> 7);
        final int outAlpha = (srcAlpha * multiplier) >> 8;
        // Replace the alpha byte, keeping the RGB bytes as-is.
        return (outAlpha << 24) | (color & 0x00FFFFFF);
    }
}
public class ConnectionMonitorsInner {
    /**
     * Starts the specified connection monitor.
     *
     * @param resourceGroupName The name of the resource group containing Network Watcher.
     * @param networkWatcherName The name of the Network Watcher resource.
     * @param connectionMonitorName The name of the connection monitor.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> startAsync(String resourceGroupName, String networkWatcherName, String connectionMonitorName, final ServiceCallback<Void> serviceCallback) {
        // Thin adapter: wraps the observable-based overload in a ServiceFuture
        // so the caller's callback is invoked on completion.
        return ServiceFuture.fromResponse(startWithServiceResponseAsync(resourceGroupName, networkWatcherName, connectionMonitorName), serviceCallback);
    }
}
public class JettySetting {
    /**
     * Creates the ServletHolder for Hulu's ActionServlet.
     *
     * @return the configured ServletHolder
     */
    private static ServletHolder createHuluServletHolder() {
        ServletHolder servletHolder = new ServletHolder(ActionServlet.class);
        // Async is disabled; init order 1 makes the servlet initialise eagerly
        // at container start rather than on first request.
        servletHolder.setAsyncSupported(false);
        servletHolder.setInitOrder(1);
        return servletHolder;
    }
}
public class MapBackedClassLoader { /** * Javadocs recommend that this method not be overloaded . We overload this so that we can prioritise the fastFindClass
* over method calls to parent . loadClass ( name , false ) ; and c = findBootstrapClass0 ( name ) ; which the default implementation
* would first - hence why we call it " fastFindClass " instead of standard findClass , this indicates that we give it a
* higher priority than normal . */
public synchronized Class loadClass ( final String name , final boolean resolve ) throws ClassNotFoundException { } } | Class clazz = fastFindClass ( name ) ; if ( clazz == null ) { final ClassLoader parent = getParent ( ) ; if ( parent != null ) { clazz = Class . forName ( name , true , parent ) ; } } if ( resolve ) { resolveClass ( clazz ) ; } return clazz ; |
public class CommerceTaxMethodPersistenceImpl {
    /**
     * Returns a range of all the commerce tax methods.
     *
     * Useful when paginating results. Returns a maximum of <code>end - start</code>
     * instances. <code>start</code> and <code>end</code> are not primary keys; they
     * are indexes in the result set, so <code>0</code> refers to the first result.
     * Setting both to {@link QueryUtil#ALL_POS} returns the full result set.
     *
     * @param start the lower bound of the range of commerce tax methods
     * @param end the upper bound of the range of commerce tax methods (not inclusive)
     * @return the range of commerce tax methods
     */
    @Override
    public List<CommerceTaxMethod> findAll(int start, int end) {
        // Delegates to the three-argument overload with no orderByComparator,
        // which applies the default ordering rules described above.
        return findAll(start, end, null);
    }
}
public class druidGParser {
    /**
     * ANTLR-generated rule. druidG.g:265:1:
     * anyValue returns [Object obj] : ( a= SINGLE_QUOTE_STRING | b= ( LONG | FLOAT ) );
     *
     * @return the unquoted string for a SINGLE_QUOTE_STRING token, or the raw
     *         token text for LONG/FLOAT; null if recognition failed and was recovered
     */
    public final Object anyValue() throws RecognitionException {
        Object obj = null;
        Token a = null;
        Token b = null;
        try {
            // druidG.g:266:2: ( a= SINGLE_QUOTE_STRING | b= ( LONG | FLOAT ) )
            // One-token lookahead decides which alternative to take.
            int alt128 = 2;
            int LA128_0 = input.LA(1);
            if ((LA128_0 == SINGLE_QUOTE_STRING)) {
                alt128 = 1;
            } else if ((LA128_0 == FLOAT || LA128_0 == LONG)) {
                alt128 = 2;
            } else {
                NoViableAltException nvae = new NoViableAltException("", 128, 0, input);
                throw nvae;
            }
            switch (alt128) {
                case 1:
                    // druidG.g:266:3: a= SINGLE_QUOTE_STRING
                    {
                        a = (Token) match(input, SINGLE_QUOTE_STRING, FOLLOW_SINGLE_QUOTE_STRING_in_anyValue1912);
                        obj = unquote((a != null ? a.getText() : null));
                    }
                    break;
                case 2:
                    // druidG.g:266:53: b= ( LONG | FLOAT )
                    {
                        b = input.LT(1);
                        if (input.LA(1) == FLOAT || input.LA(1) == LONG) {
                            input.consume();
                            state.errorRecovery = false;
                        } else {
                            MismatchedSetException mse = new MismatchedSetException(null, input);
                            throw mse;
                        }
                        obj = (b != null ? b.getText() : null);
                    }
                    break;
            }
        } catch (RecognitionException re) {
            // Standard ANTLR error recovery: report, resync, return partial result.
            reportError(re);
            recover(input, re);
        } finally {
            // do for sure before leaving
        }
        return obj;
    }
}
public class AbstractListPreference {
    /**
     * Initializes the list preference.
     *
     * @param attributeSet
     *            The attribute set which should be used to initialize the list
     *            preference, as an instance of the type {@link AttributeSet}, or
     *            null if no attributes should be obtained
     * @param defaultStyle
     *            The default style to apply to this preference. If 0, no style
     *            will be applied (beyond what is included in the theme). This may
     *            either be an attribute resource, whose value will be retrieved
     *            from the current theme, or an explicit style resource
     * @param defaultStyleResource
     *            A resource identifier of a style resource that supplies default
     *            values for the preference, used only if the default style is 0
     *            or cannot be found in the theme. Can be 0 to not look for defaults
     */
    private void initialize(@Nullable final AttributeSet attributeSet, @AttrRes final int defaultStyle, @StyleRes final int defaultStyleResource) {
        // Assign empty defaults first: obtainStyledAttributes may replace them
        // with values from XML, so the order of these statements matters.
        entries = new CharSequence[0];
        entryValues = new CharSequence[0];
        obtainStyledAttributes(attributeSet, defaultStyle, defaultStyleResource);
    }
}
public class AbstractInstanceRegistry {
    /**
     * Registers a new instance with a given lease duration.
     *
     * Holds the registry read lock for the whole operation; per-app lease maps are
     * created on demand. If a lease already exists with a newer dirty timestamp,
     * the existing InstanceInfo is kept instead of the incoming registrant.
     *
     * @see com.netflix.eureka.lease.LeaseManager#register(java.lang.Object, int, boolean)
     */
    public void register(InstanceInfo registrant, int leaseDuration, boolean isReplication) {
        try {
            read.lock();
            Map<String, Lease<InstanceInfo>> gMap = registry.get(registrant.getAppName());
            REGISTER.increment(isReplication);
            if (gMap == null) {
                // First instance of this app: install a fresh map, racing with
                // other registrants via putIfAbsent.
                final ConcurrentHashMap<String, Lease<InstanceInfo>> gNewMap = new ConcurrentHashMap<String, Lease<InstanceInfo>>();
                gMap = registry.putIfAbsent(registrant.getAppName(), gNewMap);
                if (gMap == null) {
                    gMap = gNewMap;
                }
            }
            Lease<InstanceInfo> existingLease = gMap.get(registrant.getId());
            // Retain the last dirty timestamp without overwriting it, if there is already a lease
            if (existingLease != null && (existingLease.getHolder() != null)) {
                Long existingLastDirtyTimestamp = existingLease.getHolder().getLastDirtyTimestamp();
                Long registrationLastDirtyTimestamp = registrant.getLastDirtyTimestamp();
                logger.debug("Existing lease found (existing={}, provided={}", existingLastDirtyTimestamp, registrationLastDirtyTimestamp);
                // this is a > instead of a >= because if the timestamps are equal, we still take the remote
                // transmitted InstanceInfo instead of the server local copy.
                if (existingLastDirtyTimestamp > registrationLastDirtyTimestamp) {
                    logger.warn("There is an existing lease and the existing lease's dirty timestamp {} is greater" + " than the one that is being registered {}", existingLastDirtyTimestamp, registrationLastDirtyTimestamp);
                    logger.warn("Using the existing instanceInfo instead of the new instanceInfo as the registrant");
                    registrant = existingLease.getHolder();
                }
            } else {
                // The lease does not exist and hence it is a new registration
                synchronized (lock) {
                    if (this.expectedNumberOfClientsSendingRenews > 0) {
                        // Since the client wants to register it, increase the number of clients sending renews
                        this.expectedNumberOfClientsSendingRenews = this.expectedNumberOfClientsSendingRenews + 1;
                        updateRenewsPerMinThreshold();
                    }
                }
                logger.debug("No previous lease information found; it is new registration");
            }
            Lease<InstanceInfo> lease = new Lease<InstanceInfo>(registrant, leaseDuration);
            if (existingLease != null) {
                // Preserve service-up time across re-registration.
                lease.setServiceUpTimestamp(existingLease.getServiceUpTimestamp());
            }
            gMap.put(registrant.getId(), lease);
            synchronized (recentRegisteredQueue) {
                recentRegisteredQueue.add(new Pair<Long, String>(System.currentTimeMillis(), registrant.getAppName() + "(" + registrant.getId() + ")"));
            }
            // This is where the initial state transfer of overridden status happens
            if (!InstanceStatus.UNKNOWN.equals(registrant.getOverriddenStatus())) {
                logger.debug("Found overridden status {} for instance {}. Checking to see if needs to be add to the " + "overrides", registrant.getOverriddenStatus(), registrant.getId());
                if (!overriddenInstanceStatusMap.containsKey(registrant.getId())) {
                    logger.info("Not found overridden id {} and hence adding it", registrant.getId());
                    overriddenInstanceStatusMap.put(registrant.getId(), registrant.getOverriddenStatus());
                }
            }
            InstanceStatus overriddenStatusFromMap = overriddenInstanceStatusMap.get(registrant.getId());
            if (overriddenStatusFromMap != null) {
                logger.info("Storing overridden status {} from map", overriddenStatusFromMap);
                registrant.setOverriddenStatus(overriddenStatusFromMap);
            }
            // Set the status based on the overridden status rules
            InstanceStatus overriddenInstanceStatus = getOverriddenInstanceStatus(registrant, existingLease, isReplication);
            registrant.setStatusWithoutDirty(overriddenInstanceStatus);
            // If the lease is registered with UP status, set lease service up timestamp
            if (InstanceStatus.UP.equals(registrant.getStatus())) {
                lease.serviceUp();
            }
            registrant.setActionType(ActionType.ADDED);
            recentlyChangedQueue.add(new RecentlyChangedItem(lease));
            registrant.setLastUpdatedTimestamp();
            invalidateCache(registrant.getAppName(), registrant.getVIPAddress(), registrant.getSecureVipAddress());
            logger.info("Registered instance {}/{} with status {} (replication={})", registrant.getAppName(), registrant.getId(), registrant.getStatus(), isReplication);
        } finally {
            read.unlock();
        }
    }
}
public class StatefulPassivator {
    /**
     * Passivates the stateful session bean to the bean store.
     *
     * Serializes the bean instance (plus its persistence context, managed-object
     * context and interceptors) to the bean store, optionally replicating the
     * compressed bytes to failover servers when SFSB failover is enabled. On any
     * failure the partially-written store entry is closed and removed.
     *
     * @param beanO BeanO that corresponds to the bean to passivate
     * @param bmd bean metadata used to resolve passivator fields
     * @throws RemoteException if passivation fails for any reason
     */
    public void passivate(StatefulBeanO beanO, BeanMetaData bmd) // d648122
                    throws RemoteException {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled(); // 130050
        if (isTraceOn && tc.isEntryEnabled()) // 130050
            Tr.entry(tc, "passivate: " + beanO);
        // Nothing to do for removed beans or while the container is shutting down.
        if (beanO.isRemoved() || isTerminating()) {
            if (isTraceOn && tc.isEventEnabled()) // 130050
                Tr.event(tc, "Bean removed!");
            return;
        }
        BeanId bid = beanO.getId();
        Object sb = beanO.ivEjbInstance; // d367572.7
        boolean exceptionCaught = false; // 155114
        ObjectOutputStream beanStream = null; // 155114
        // LI2775-107.2 Begins WS18354.02a, MD19305C
        Object credToken = ivContainer.getEJBRuntime().pushServerIdentity();
        // LI2775-107.2 Ends
        ObjectOutputStream beanStream2 = null; // d430549.11
        // Get the JPAExPcBindingContext for this SFSB.
        Object exPC = beanO.getJPAExPcBindingContext(); // d468174
        try { // LIDB2018-1 begins
            // Check whether SFSB failover is enabled. Note, we could factor out
            // this code so it is the same regardless of whether SFSB failover is
            // enabled or not. However, to ensure no impact to existing performance
            // when not enabled, we chose to not factor into a common implementation.
            ByteArrayOutputStream baos = null;
            long lastAccessTime = beanO.getLastAccessTime();
            // PK69093 - Only allow one stream to be open for writing at one time.
            synchronized (ivPassivateLock) {
                if (beanO.sfsbFailoverEnabled()) {
                    // For failover, initially write to a ByteArrayOutputStream.
                    // This will later be converted to bytes and then persisted.
                    if (isTraceOn && tc.isDebugEnabled())
                        Tr.debug(tc, "failover is enabled");
                    // If the EJB module is 3.0 or greater, use the new format, which is later.
                    if (getEJBModuleVersion(beanO) >= BeanMetaData.J2EE_EJB_VERSION_3_0) {
                        // Set up the bean stream to initially write to a ByteArrayOutputStream.
                        baos = new ByteArrayOutputStream(1024);
                        beanStream = createPassivationOutputStream(new GZIPOutputStream(baos));
                    } else {
                        // pre-3.0 module - use the old format since we may be in a
                        // mixed cluster environment.
                        if (isTraceOn && tc.isDebugEnabled())
                            Tr.debug(tc, "processing EJB 2.1 module or prior");
                        // Serialize and compress the data in the SFSB.
                        byte[] bytes = getCompressedBytes(sb, lastAccessTime, exPC);
                        // Get a file outstream that does not compress the data
                        // since we already have the data compressed.
                        OutputStream ostream = ivBeanStore.getOutputStream(bid);
                        beanStream = createPassivationOutputStream(ostream);
                        // Write length of compressed data.
                        beanStream.writeInt(bytes.length);
                        if (isTraceOn && tc.isDebugEnabled())
                            Tr.debug(tc, "length of compressed bytes is: " + bytes.length);
                        // Write compressed data to file.
                        beanStream.write(bytes);
                        beanStream.close();
                        // Replicate compressed data to failover servers by calling
                        // the method that updates the failover cache entry for this SFSB.
                        beanO.updateFailoverEntry(bytes, lastAccessTime);
                        return;
                    }
                } else // failover not enabled - setup the beanStream
                {
                    if (isTraceOn && tc.isDebugEnabled())
                        Tr.debug(tc, "failover is NOT enabled");
                    beanStream = createPassivationOutputStream(ivBeanStore.getGZIPOutputStream(bid));
                }
                // Passivate sb in the 3.0 format
                EJBObjectInfo objectInfo = null;
                Map<String, Map<String, Field>> passivatorFields = getPassivatorFields(bmd); // d648122
                if (sb instanceof Serializable) {
                    objectInfo = createSerializableObjectInfo(sb);
                } else {
                    objectInfo = createNonSerializableObjectInfo(sb, passivatorFields); // d648122
                }
                // Write last access time.
                beanStream.writeLong(lastAccessTime);
                // Write the persistence context.
                beanStream.writeObject(exPC);
                // Write the SFSB state.
                beanStream.writeObject(objectInfo);
                // Write the managed object state of the EJB instance.
                writeManagedObjectContext(beanStream, beanO.ivEjbManagedObjectContext);
                Object[] interceptors = beanO.ivInterceptors;
                if (interceptors == null) {
                    // -1 marks "no interceptors" on the stream.
                    beanStream.writeInt(-1);
                } else {
                    if (isTraceOn && tc.isDebugEnabled())
                        Tr.debug(tc, "Processing " + interceptors.length + " interceptors");
                    beanStream.writeInt(interceptors.length);
                    for (int i = 0; i < interceptors.length; i++) {
                        Object interceptor = interceptors[i];
                        EJBObjectInfo interceptorObjectInfo = null;
                        if (interceptor instanceof Serializable) {
                            interceptorObjectInfo = createSerializableObjectInfo(interceptor);
                        } else {
                            interceptorObjectInfo = createNonSerializableObjectInfo(interceptor, passivatorFields); // d648122
                        }
                        beanStream.writeObject(interceptorObjectInfo);
                    }
                }
                beanStream.close();
                // If failover is enabled, now convert to a byte array and persist it.
                if (beanO.sfsbFailoverEnabled() && baos != null) // d468174
                {
                    byte[] bytes = baos.toByteArray();
                    beanStream2 = createPassivationOutputStream(ivBeanStore.getOutputStream(bid));
                    beanStream2.writeInt(bytes.length);
                    beanStream2.write(bytes);
                    beanStream2.close();
                    // Replicate compressed data to failover cache servers by calling
                    // the method that updates the failover cache entry for this SFSB.
                    beanO.updateFailoverEntry(bytes, lastAccessTime);
                }
                // LIDB2018-1 ends
            }
        } catch (CSIException ex) {
            exceptionCaught = true; // 155114
            FFDCFilter.processException(ex, CLASS_NAME + ".passivate", "113", this);
            throw new RemoteException("passivation failed", ex);
        } catch (Throwable e) // 155114
        {
            // d584932 - Catch Throwable, not Exception, to ensure that we
            // set exceptionCaught when Errors are thrown.
            exceptionCaught = true; // 155114
            FFDCFilter.processException(e, CLASS_NAME + ".passivate", "107", this);
            Tr.warning(tc, "CANNOT_PASSIVATE_STATEFUL_BEAN_CNTR0001W", new Object[] { beanO.toString(), this, e }); // p111002.3
            throw new RemoteException("passivation failed", e);
        } finally // 155114
        {
            // LI2775-107.2 Begins WS18354.02a, MD19305C
            if (credToken != null) // d646413.2
            {
                ivContainer.getEJBRuntime().popServerIdentity(credToken);
            }
            // LI2775-107.2 Ends
            if (exceptionCaught) // 155114
            {
                // attempt to close and remove the beanStore
                try {
                    if (beanStream != null) {
                        beanStream.close();
                        // PK69093 - beanStore.remove will access a file, as such lets
                        // synch this call such that only one file can be open for
                        // remove at a time.
                        synchronized (ivRemoveLock) {
                            ivBeanStore.remove(bid);
                        }
                    }
                    if (beanStream2 != null) // d430549.11
                    {
                        beanStream2.close();
                    }
                } catch (Exception ex) {
                    // Best-effort cleanup: the original failure is already being thrown.
                    if (isTraceOn && tc.isDebugEnabled())
                        Tr.debug(tc, "exception closing stream", ex);
                }
            }
        }
        if (isTraceOn && tc.isEntryEnabled()) // 130050
            Tr.exit(tc, "passivate");
    }
}
public class DefaultKeySanitizer {
    /**
     * Sanitises a cache key by Base64-encoding its UTF-8 bytes.
     * Only String keys are supported.
     *
     * @param cacheKey the key to sanitise; must be a String
     * @return the Base64-encoded form of the key
     * @throws KeySanitationExcepion if the key is not a String, or if the UTF-8
     *         charset is unsupported (should never happen on a conforming JVM)
     */
    @Override
    public Object sanitizeKey(Object cacheKey) throws KeySanitationExcepion {
        if (!(cacheKey instanceof String)) {
            throw new KeySanitationExcepion(DefaultKeySanitizer.class.getSimpleName() + " can only be used with Strings cache keys.");
        }
        try {
            return Base64.encodeToString(((String) cacheKey).getBytes(UTF8_CHARSET_NAME), BASE64_FLAGS);
        } catch (UnsupportedEncodingException e) {
            throw new KeySanitationExcepion(e);
        }
    }
}
public class LruCache { /** * Puts all the values from the given map into the cache .
* @ param m The map containing entries to put into the cache */
public synchronized void putAll ( Map < Key , Value > m ) { } } | for ( Map . Entry < Key , Value > entry : m . entrySet ( ) ) { this . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } |
public class ParameterList { /** * Remove all parameters with the specified name .
* @ param paramName the name of parameters to remove */
public final void removeAll ( final String paramName ) { } } | final ParameterList params = getParameters ( paramName ) ; parameters . removeAll ( params . parameters ) ; |
public class CharTrie { /** * Recompute cursor details char trie .
* @ return the char trie */
CharTrie recomputeCursorDetails ( ) { } } | godparentIndex = new int [ getNodeCount ( ) ] ; parentIndex = new int [ getNodeCount ( ) ] ; Arrays . fill ( godparentIndex , 0 , godparentIndex . length , - 1 ) ; Arrays . fill ( parentIndex , 0 , parentIndex . length , - 1 ) ; System . gc ( ) ; recomputeCursorTotals ( root ( ) ) ; System . gc ( ) ; recomputeCursorPositions ( root ( ) , 0 ) ; System . gc ( ) ; return this ; |
public class ExtDirectSpringUtil { /** * Checks if the request is a multipart request
* @ param request the HTTP servlet request
* @ return true if request is a Multipart request ( file upload ) */
public static boolean isMultipart ( HttpServletRequest request ) { } } | if ( ! "post" . equals ( request . getMethod ( ) . toLowerCase ( ) ) ) { return false ; } String contentType = request . getContentType ( ) ; return contentType != null && contentType . toLowerCase ( ) . startsWith ( "multipart/" ) ; |
public class Drawer {
    /**
     * Builds the nav drawer layout, inflates it, then attaches it to the activity.
     * Must run in this order: transition suppression, window flags, decor
     * hijacking, drawer setup, then population.
     */
    @SuppressLint("NewApi")
    private void buildAndAttach() {
        // Disable any pending transition on the activity since we are transforming it.
        mActivity.overridePendingTransition(0, 0);
        // Setup window flags if Lollipop: draw behind a transparent status bar.
        if (BuildUtils.isLollipop()) {
            Window window = mActivity.getWindow();
            window.addFlags(FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS);
            window.setStatusBarColor(Color.TRANSPARENT);
        }
        // Attach layout and pane to the UI by replacing the decor view contents.
        hiJackDecor();
        // Set up drawer behavior.
        setupDrawer();
        // Populate and inflate the drawer contents.
        populateNavDrawer();
    }
}
public class DescribeAlarmsForMetricResult {
    /**
     * The information for each alarm with the specified metric.
     *
     * @return The information for each alarm with the specified metric; never null
     */
    public java.util.List<MetricAlarm> getMetricAlarms() {
        // Lazily initialise so callers never see null (standard AWS SDK pattern).
        if (metricAlarms == null) {
            metricAlarms = new com.amazonaws.internal.SdkInternalList<MetricAlarm>();
        }
        return metricAlarms;
    }
}
public class MergeMapParser {

    /**
     * Reads and merges a DITA map file, appending the serialised result to the
     * internal output buffer.
     *
     * @param filename map file path
     * @param tmpDir temporary directory path, may be {@code null} (falls back to the
     *               map file's parent directory)
     */
    public void read(final File filename, final File tmpDir) {
        tempdir = tmpDir != null ? tmpDir : filename.getParentFile();
        try {
            // Serialise SAX events into 'output' without an XML declaration, since the
            // result is appended into a larger document.
            final TransformerHandler s = stf.newTransformerHandler();
            s.getTransformer().setOutputProperty(OMIT_XML_DECLARATION, "yes");
            s.setResult(new StreamResult(output));
            setContentHandler(s);
            dirPath = filename.getParentFile();
            reader.setErrorHandler(new DITAOTXMLErrorHandler(filename.getAbsolutePath(), logger));
            // Wrap the parse in explicit start/end document events on the topic parser.
            topicParser.getContentHandler().startDocument();
            logger.info("Processing " + filename.getAbsolutePath());
            reader.parse(filename.toURI().toString());
            topicParser.getContentHandler().endDocument();
            // Flush the collected topic bytes after the map itself.
            output.write(topicBuffer.toByteArray());
        } catch (final RuntimeException e) {
            throw e;
        } catch (final Exception e) {
            // Checked failures are logged and swallowed: parsing is best-effort here.
            logger.error(e.getMessage(), e);
        }
    }
}
public class AlluxioJobMaster {

    /**
     * Starts the Alluxio job master.
     *
     * @param args command line arguments, should be empty
     */
    public static void main(String[] args) {
        // No arguments are accepted; print the expected launch command and exit.
        if (args.length != 0) {
            LOG.info("java -cp {} {}", RuntimeConstants.ALLUXIO_JAR, AlluxioJobMaster.class.getCanonicalName());
            System.exit(-1);
        }
        CommonUtils.PROCESS_TYPE.set(alluxio.util.CommonUtils.ProcessType.JOB_MASTER);
        AlluxioJobMasterProcess process;
        try {
            process = AlluxioJobMasterProcess.Factory.create();
        } catch (Throwable t) {
            LOG.error("Failed to create job master process", t);
            // Exit to stop any non-daemon threads.
            System.exit(-1);
            throw t;
        }
        ProcessUtils.run(process);
    }
}
public class ExtensionScript { /** * Returns a unique name for the given script name */
private String getUniqueScriptName ( String name , String ext ) { } } | if ( this . getScriptImpl ( name ) == null ) { // Its unique
return name ; } // Its not unique , add a suitable index . . .
String stub = name . substring ( 0 , name . length ( ) - ext . length ( ) - 1 ) ; int index = 1 ; do { index ++ ; name = stub + "(" + index + ")." + ext ; } while ( this . getScriptImpl ( name ) != null ) ; return name ; |
public class ListTagsForVaultResult {

    /**
     * The tags attached to the vault. Each tag is composed of a key and a value.
     *
     * @param tags the tags attached to the vault; each tag is composed of a key and a value
     * @return returns a reference to this object so that method calls can be chained together
     */
    public ListTagsForVaultResult withTags(java.util.Map<String, String> tags) {
        // Fluent variant of setTags(): delegate, then return this for chaining.
        setTags(tags);
        return this;
    }
}
public class ParentEditController { /** * Set a new child edit controller . If there was a previous child controller , then it ' s < code > onDeactivate < / code >
* method will be called first . Likewise on the new controller , the < code > onActivate < / code > will be called .
* @ param controller
* The new child edit controller . A controller usually masters editing for one type of geometry . This
* parameter can also be < code > null < / code > , when no child edit controller is required anymore . If this
* parameter is < code > null < / code > , then this controller < code > onActivate < / code > method is called . */
public void setController ( EditController controller ) { } } | if ( this . controller != null ) { this . controller . onDeactivate ( ) ; } this . controller = controller ; if ( controller != null ) { controller . setMaxBoundsDisplayed ( isMaxBoundsDisplayed ( ) ) ; controller . onActivate ( ) ; } else { onActivate ( ) ; } |
public class KeyVaultClientBaseImpl { /** * Merges a certificate or a certificate chain with a key pair existing on the server .
* The MergeCertificate operation performs the merging of a certificate or certificate chain with a key pair currently available in the service . This operation requires the certificates / create permission .
* @ param vaultBaseUrl The vault name , for example https : / / myvault . vault . azure . net .
* @ param certificateName The name of the certificate .
* @ param x509Certificates The certificate or the certificate chain to merge .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the CertificateBundle object */
public Observable < CertificateBundle > mergeCertificateAsync ( String vaultBaseUrl , String certificateName , List < byte [ ] > x509Certificates ) { } } | return mergeCertificateWithServiceResponseAsync ( vaultBaseUrl , certificateName , x509Certificates ) . map ( new Func1 < ServiceResponse < CertificateBundle > , CertificateBundle > ( ) { @ Override public CertificateBundle call ( ServiceResponse < CertificateBundle > response ) { return response . body ( ) ; } } ) ; |
public class DynamoDBQueryExpression { /** * Sets one range key condition for this query , using the attribute name of
* the range key . All range key attributes for the table must be specified
* by using { @ link DynamoDBRangeKey } or { @ link DynamoDBIndexRangeKey } annotations
* before executing the query .
* < dl >
* < dt > If the attribute is the primary range key < / dt >
* < dd > users should NOT set any index name for this query . < / dd >
* < dt > If the attribute is an index range key < / dt >
* < dd >
* { @ link DynamoDBMapper } will automatically set the index name if the
* range key is annotated as only used by one local secondary index ,
* otherwise users must set the index name manually by either
* { @ link DynamoDBQueryExpression # setIndexName ( String ) } or
* { @ link DynamoDBQueryExpression # withIndexName ( String ) } .
* < / dd >
* < / dl >
* @ param rangeKeyAttributeName
* This can be either the primary range key of the table or an
* index range key .
* @ param rangeKeyCondition
* Condition specified on the given range key for this query . */
public DynamoDBQueryExpression < T > withRangeKeyCondition ( String rangeKeyAttributeName , Condition rangeKeyCondition ) { } } | if ( rangeKeyConditions == null ) rangeKeyConditions = new HashMap < String , Condition > ( ) ; rangeKeyConditions . put ( rangeKeyAttributeName , rangeKeyCondition ) ; return this ; |
public class CreateDatasetContentRequestMarshaller {

    /**
     * Marshalls the given request object onto the protocol marshaller.
     *
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(CreateDatasetContentRequest createDatasetContentRequest, ProtocolMarshaller protocolMarshaller) {
        if (createDatasetContentRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(createDatasetContentRequest.getDatasetName(), DATASETNAME_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class TriggerConfig { /** * The event type or types for which notifications are triggered .
* @ param triggerEvents
* The event type or types for which notifications are triggered .
* @ see TriggerEventType */
public void setTriggerEvents ( java . util . Collection < String > triggerEvents ) { } } | if ( triggerEvents == null ) { this . triggerEvents = null ; return ; } this . triggerEvents = new com . amazonaws . internal . SdkInternalList < String > ( triggerEvents ) ; |
public class EventDAOWrapper {

    /**
     * {@inheritDoc}
     *
     * Pure delegation to the wrapped DAO.
     */
    @Override
    public <E extends Entity> ObjectStream<E> streamEntities(EntityQuery<E> eq) {
        return wrapper.streamEntities(eq);
    }
}
public class AbstractTransport { /** * Extract parts from Multi - part message .
* @ param multiPart multi - part to visit
* @ return map of part contentType - > part content
* @ throws MessagingException
* @ throws IOException */
static Map < String , Collection < String > > extractTextParts ( Multipart multiPart ) throws MessagingException , IOException { } } | HashMap < String , Collection < String > > bodies = new HashMap < > ( ) ; for ( int i = 0 ; i < multiPart . getCount ( ) ; i ++ ) { checkPartForTextType ( bodies , multiPart . getBodyPart ( i ) ) ; } return bodies ; |
public class OneSideTrim { /** * { @ inheritDoc } */
@ Override public < T > T execute ( final Object value , final CsvContext context ) { } } | if ( value == null ) { return next . execute ( value , context ) ; } final String result = trim ( value . toString ( ) ) ; return next . execute ( result , context ) ; |
public class VerificationConditionGenerator { /** * Flatten a given assumption set into a single logical condition . The key
* challenge here is to try and do this as efficiency as possible .
* @ param assumptions
* @ return */
private WyalFile . Stmt flatten ( AssumptionSet assumptions ) { } } | WyalFile . Stmt result = flattenUpto ( assumptions , null ) ; if ( result == null ) { return new Expr . Constant ( new Value . Bool ( true ) ) ; } else { return result ; } |
public class ObjectId { /** * Converts this instance into a 24 - byte hexadecimal string representation .
* @ return a string representation of the ObjectId in hexadecimal format */
public String toHexString ( ) { } } | final StringBuilder buf = new StringBuilder ( 24 ) ; for ( final byte b : toByteArray ( ) ) { buf . append ( String . format ( "%02x" , b & 0xff ) ) ; } return buf . toString ( ) ; |
public class ShareLinkManager {

    /**
     * Invokes a sharing client with a link created by the given json objects.
     *
     * @param selectedResolveInfo the {@link ResolveInfo} corresponding to the selected sharing client
     */
    private void invokeSharingClient(final ResolveInfo selectedResolveInfo) {
        isShareInProgress_ = true;
        final String channelName = selectedResolveInfo.loadLabel(context_.getPackageManager()).toString();
        BranchShortLinkBuilder shortLinkBuilder = builder_.getShortLinkBuilder();
        // Link generation is asynchronous; sharing continues in the callback below.
        shortLinkBuilder.generateShortUrlInternal(new Branch.BranchLinkCreateListener() {
            @Override
            public void onLinkCreate(String url, BranchError error) {
                if (error == null) {
                    shareWithClient(selectedResolveInfo, url, channelName);
                } else {
                    // If there is a default URL specified share it.
                    String defaultUrl = builder_.getDefaultURL();
                    if (defaultUrl != null && defaultUrl.trim().length() > 0) {
                        shareWithClient(selectedResolveInfo, defaultUrl, channelName);
                    } else {
                        if (callback_ != null) {
                            callback_.onLinkShareResponse(url, channelName, error);
                        } else {
                            PrefHelper.Debug("Unable to share link " + error.getMessage());
                        }
                        // NOTE(review): for connectivity / tracking-disabled errors, sharing
                        // proceeds with 'url', which appears to be null on this error path —
                        // confirm this fallback is intended.
                        if (error.getErrorCode() == BranchError.ERR_BRANCH_NO_CONNECTIVITY || error.getErrorCode() == BranchError.ERR_BRANCH_TRACKING_DISABLED) {
                            shareWithClient(selectedResolveInfo, url, channelName);
                        } else {
                            cancelShareLinkDialog(false);
                            isShareInProgress_ = false;
                        }
                    }
                }
            }
        }, true);
    }
}
public class AntBuilder {

    /**
     * Determines when the ANT Task represented by the "node" should perform.
     * Node must be an ANT Task or no "perform" is called.
     * If node is an ANT Task, it performs right after complete construction.
     * If node is nested in a TaskContainer, calling "perform" is delegated to that
     * TaskContainer.
     *
     * @param parent note: null when node is root
     * @param node the node that now has all its children applied
     */
    protected void nodeCompleted(final Object parent, final Object node) {
        if (parent == null) insideTask = false;
        antElementHandler.onEndElement(null, null, antXmlContext);
        lastCompletedNode = node;
        if (parent != null && !(parent instanceof Target)) {
            log.finest("parent is not null: no perform on nodeCompleted");
            return; // parent will care about when children perform
        }
        if (definingTarget != null && definingTarget == parent && node instanceof Task) return; // inside defineTarget
        if (definingTarget == node) {
            definingTarget = null;
        }
        // as in Target.execute()
        if (node instanceof Task) {
            Task task = (Task) node;
            final String taskName = task.getTaskName();
            if ("antcall".equals(taskName) && parent == null) {
                throw new BuildException("antcall not supported within AntBuilder, consider using 'ant.project.executeTarget('targetName')' instead.");
            }
            if (saveStreams) { // save original streams
                // Ref-counted under the class lock: only the first concurrent entrant
                // swaps the JVM-global System streams for demuxing ones.
                synchronized (AntBuilder.class) {
                    int currentStreamCount = streamCount++;
                    if (currentStreamCount == 0) { // we are first, save the streams
                        savedProjectInputStream = project.getDefaultInputStream();
                        savedIn = System.in;
                        savedErr = System.err;
                        savedOut = System.out;
                        if (!(savedIn instanceof DemuxInputStream)) {
                            project.setDefaultInputStream(savedIn);
                            demuxInputStream = new DemuxInputStream(project);
                            System.setIn(demuxInputStream);
                        }
                        demuxOutputStream = new DemuxOutputStream(project, false);
                        System.setOut(new PrintStream(demuxOutputStream));
                        demuxErrorStream = new DemuxOutputStream(project, true);
                        System.setErr(new PrintStream(demuxErrorStream));
                    }
                }
            }
            try {
                lastCompletedNode = performTask(task);
            } finally {
                // Mirror of the setup above: the last concurrent leaver restores the
                // original streams, even when performTask throws.
                if (saveStreams) {
                    synchronized (AntBuilder.class) {
                        int currentStreamCount = --streamCount;
                        if (currentStreamCount == 0) { // last to leave, turn out the lights: restore original streams
                            project.setDefaultInputStream(savedProjectInputStream);
                            System.setOut(savedOut);
                            System.setErr(savedErr);
                            if (demuxInputStream != null) {
                                System.setIn(savedIn);
                                DefaultGroovyMethodsSupport.closeQuietly(demuxInputStream);
                                demuxInputStream = null;
                            }
                            DefaultGroovyMethodsSupport.closeQuietly(demuxOutputStream);
                            DefaultGroovyMethodsSupport.closeQuietly(demuxErrorStream);
                            demuxOutputStream = null;
                            demuxErrorStream = null;
                        }
                    }
                }
            }
            // restore dummy collector target
            if ("import".equals(taskName)) {
                antXmlContext.setCurrentTarget(collectorTarget);
            }
        } else if (node instanceof Target) {
            // restore dummy collector target
            antXmlContext.setCurrentTarget(collectorTarget);
        } else {
            // Not a Task or Target: configure the wrapper in place.
            final RuntimeConfigurable r = (RuntimeConfigurable) node;
            r.maybeConfigure(project);
        }
    }
}
public class MaterialScrollfire { /** * Executes callback method depending on how far into the page you ' ve scrolled
* @ param element Target element that is being tracked
* @ param offset If this is 0 , the callback will be fired when the selector element is at the very bottom of the user ' s window .
* @ param callback The method to be called when the scrollfire is applied */
public static void apply ( Element element , int offset , Functions . Func callback ) { } } | MaterialScrollfire scrollfire = new MaterialScrollfire ( ) ; scrollfire . setElement ( element ) ; scrollfire . setCallback ( callback ) ; scrollfire . setOffset ( offset ) ; scrollfire . apply ( ) ; |
public class ApiOvhCloud { /** * Get network subnets
* REST : GET / cloud / project / { serviceName } / network / private / { networkId } / subnet
* @ param networkId [ required ] Network id
* @ param serviceName [ required ] Service name */
public ArrayList < OvhSubnet > project_serviceName_network_private_networkId_subnet_GET ( String serviceName , String networkId ) throws IOException { } } | String qPath = "/cloud/project/{serviceName}/network/private/{networkId}/subnet" ; StringBuilder sb = path ( qPath , serviceName , networkId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , t6 ) ; |
public class VersionDependency { /** * Returns true if the underlying resource has changed . */
public boolean logModified ( Logger log ) { } } | if ( ! CauchoUtil . getFullVersion ( ) . equals ( _version ) ) { log . info ( "Baratine version has changed to " + CauchoUtil . getFullVersion ( ) ) ; return true ; } else return false ; |
public class LabelProcessor { /** * Get the first label for a languageCode from a label type .
* This method will call { @ link # getLabelListByLanguage ( String , LabelOrBuilder ) } to extract the list of labels
* for the languageCode and then return its first entry .
* @ param languageCode the languageCode which is checked
* @ param label the label type which is searched for labels in the language
* @ return the first label from the label type for the language code .
* @ throws NotAvailableException if no label list for the language code exists or if the list is empty */
public static String getLabelByLanguage ( final String languageCode , final LabelOrBuilder label ) throws NotAvailableException { } } | final List < String > labelList = getLabelListByLanguage ( languageCode , label ) ; if ( labelList . isEmpty ( ) ) { throw new NotAvailableException ( "Label for Language[" + languageCode + "]" ) ; } return labelList . get ( 0 ) ; |
public class CommerceWarehouseLocalServiceBaseImpl {

    /**
     * Deletes the commerce warehouse with the primary key from the database. Also
     * notifies the appropriate model listeners; the @Indexable annotation removes
     * the entry from the search index.
     *
     * @param commerceWarehouseId the primary key of the commerce warehouse
     * @return the commerce warehouse that was removed
     * @throws PortalException if a commerce warehouse with the primary key could not be found
     */
    @Indexable(type = IndexableType.DELETE)
    @Override
    public CommerceWarehouse deleteCommerceWarehouse(long commerceWarehouseId) throws PortalException {
        return commerceWarehousePersistence.remove(commerceWarehouseId);
    }
}
public class PerspectiveOps { /** * Computes : D = A < sup > T < / sup > * B * C
* @ param A ( Input ) 3x3 matrix
* @ param B ( Input ) 3x3 matrix
* @ param C ( Input ) 3x3 matrix
* @ param output ( Output ) 3x3 matrix . Can be same instance A or B . */
public static void multTranA ( DMatrixRMaj A , DMatrixRMaj B , DMatrixRMaj C , DMatrixRMaj output ) { } } | double t11 = A . data [ 0 ] * B . data [ 0 ] + A . data [ 3 ] * B . data [ 3 ] + A . data [ 6 ] * B . data [ 6 ] ; double t12 = A . data [ 0 ] * B . data [ 1 ] + A . data [ 3 ] * B . data [ 4 ] + A . data [ 6 ] * B . data [ 7 ] ; double t13 = A . data [ 0 ] * B . data [ 2 ] + A . data [ 3 ] * B . data [ 5 ] + A . data [ 6 ] * B . data [ 8 ] ; double t21 = A . data [ 1 ] * B . data [ 0 ] + A . data [ 4 ] * B . data [ 3 ] + A . data [ 7 ] * B . data [ 6 ] ; double t22 = A . data [ 1 ] * B . data [ 1 ] + A . data [ 4 ] * B . data [ 4 ] + A . data [ 7 ] * B . data [ 7 ] ; double t23 = A . data [ 1 ] * B . data [ 2 ] + A . data [ 4 ] * B . data [ 5 ] + A . data [ 7 ] * B . data [ 8 ] ; double t31 = A . data [ 2 ] * B . data [ 0 ] + A . data [ 5 ] * B . data [ 3 ] + A . data [ 8 ] * B . data [ 6 ] ; double t32 = A . data [ 2 ] * B . data [ 1 ] + A . data [ 5 ] * B . data [ 4 ] + A . data [ 8 ] * B . data [ 7 ] ; double t33 = A . data [ 2 ] * B . data [ 2 ] + A . data [ 5 ] * B . data [ 5 ] + A . data [ 8 ] * B . data [ 8 ] ; output . data [ 0 ] = t11 * C . data [ 0 ] + t12 * C . data [ 3 ] + t13 * C . data [ 6 ] ; output . data [ 1 ] = t11 * C . data [ 1 ] + t12 * C . data [ 4 ] + t13 * C . data [ 7 ] ; output . data [ 2 ] = t11 * C . data [ 2 ] + t12 * C . data [ 5 ] + t13 * C . data [ 8 ] ; output . data [ 3 ] = t21 * C . data [ 0 ] + t22 * C . data [ 3 ] + t23 * C . data [ 6 ] ; output . data [ 4 ] = t21 * C . data [ 1 ] + t22 * C . data [ 4 ] + t23 * C . data [ 7 ] ; output . data [ 5 ] = t21 * C . data [ 2 ] + t22 * C . data [ 5 ] + t23 * C . data [ 8 ] ; output . data [ 6 ] = t31 * C . data [ 0 ] + t32 * C . data [ 3 ] + t33 * C . data [ 6 ] ; output . data [ 7 ] = t31 * C . data [ 1 ] + t32 * C . data [ 4 ] + t33 * C . data [ 7 ] ; output . data [ 8 ] = t31 * C . data [ 2 ] + t32 * C . data [ 5 ] + t33 * C . data [ 8 ] ; |
public class MutateInBuilder {

    /**
     * Replaces an existing value by the given fragment.
     *
     * @param path the path where the value to replace is
     * @param fragment the new value
     */
    public <T> MutateInBuilder replace(String path, T fragment) {
        // Delegate to the async builder; this wrapper only provides a fluent API.
        asyncBuilder.replace(path, fragment);
        return this;
    }
}
public class PippoSettings { /** * Gets the duration setting and converts it to hours .
* The setting must be use one of the following conventions :
* < ul >
* < li > n MILLISECONDS
* < li > n SECONDS
* < li > n MINUTES
* < li > n HOURS
* < li > n DAYS
* < / ul >
* @ param name
* @ param defaultValue in hours
* @ return hours */
public long getDurationInHours ( String name , long defaultValue ) { } } | TimeUnit timeUnit = extractTimeUnit ( name , defaultValue + " HOURS" ) ; long duration = getLong ( name , defaultValue ) ; return timeUnit . toHours ( duration ) ; |
public class SelfRegisteringRemote {

    /**
     * Uses the hub API to get some of its configuration.
     *
     * @return json object of the current hub configuration
     */
    private GridHubConfiguration getHubConfiguration() throws Exception {
        // Build the hub's /grid/api/hub endpoint URL from the registration config.
        String hubApi = "http://" + registrationRequest.getConfiguration().getHubHost() + ":" + registrationRequest.getConfiguration().getHubPort() + "/grid/api/hub";
        URL api = new URL(hubApi);
        HttpClient client = httpClientFactory.createClient(api);
        String url = api.toExternalForm();
        HttpRequest request = new HttpRequest(GET, url);
        HttpResponse response = client.execute(request);
        // Parse the JSON body; try-with-resources closes reader and json input.
        try (Reader reader = new StringReader(response.getContentString());
             JsonInput jsonInput = new Json().newInput(reader)) {
            return GridHubConfiguration.loadFromJSON(jsonInput);
        }
    }
}
public class ThrowingFunctions { /** * Converts a throwing function and its argument into a { @ link Callable } .
* @ param < T1 > the first parameter type
* @ param < R > the result type
* @ param func a throwing function to convert into a callable
* @ param args the input to apply to the throwing function
* @ return A callable that when called will apply the provided arguments to the provided throwing function */
public static < R > Callable < R > toCallable ( final ThrowingFuncN < ? extends R > func , final Object ... args ) { } } | return new Callable < R > ( ) { @ Override public R call ( ) throws Exception { return func . call ( args ) ; } } ; |
public class SSLCertificateHelper { /** * TODO check */
public static X509Certificate [ ] exportRootCertificates ( final KeyStore ks , final String alias ) throws KeyStoreException { } } | logKeyStore ( ks ) ; final List < X509Certificate > trustedCerts = new ArrayList < X509Certificate > ( ) ; if ( Strings . isNullOrEmpty ( alias ) ) { if ( log . isDebugEnabled ( ) ) { log . debug ( "No alias given, will trust all of the certificates in the store" ) ; } final List < String > aliases = toList ( ks . aliases ( ) ) ; for ( final String _alias : aliases ) { if ( ks . isCertificateEntry ( _alias ) ) { final X509Certificate cert = ( X509Certificate ) ks . getCertificate ( _alias ) ; if ( cert != null ) { trustedCerts . add ( cert ) ; } else { log . error ( "Alias {} does not exist" , _alias ) ; } } } } else { if ( ks . isCertificateEntry ( alias ) ) { final X509Certificate cert = ( X509Certificate ) ks . getCertificate ( alias ) ; if ( cert != null ) { trustedCerts . add ( cert ) ; } else { log . error ( "Alias {} does not exist" , alias ) ; } } else { log . error ( "Alias {} does not contain a certificate entry" , alias ) ; } } return trustedCerts . toArray ( new X509Certificate [ 0 ] ) ; |
public class StreamEx {

    /**
     * Returns an ordered stream produced by consecutive calls of the supplied
     * producer until it returns false.
     *
     * The producer function may call the passed consumer any number of times and
     * returns true if the producer should be called again, false otherwise. It's
     * guaranteed that the producer will not be called anymore once it returns
     * false. Particularly useful when the producer drains a mutable source (e.g. a
     * queue) that must be left in a known state after full stream consumption —
     * though with short-circuiting operations the final state cannot be guaranteed.
     *
     * @param <T> the type of the resulting stream elements
     * @param producer a predicate which calls the passed consumer to emit stream
     *        element(s) and returns true if it should be applied again
     * @return the new stream
     * @since 0.6.0
     */
    public static <T> StreamEx<T> produce(Predicate<Consumer<? super T>> producer) {
        // The Box holds a self-referential Emitter: each round delegates to the
        // producer and, while it returns true, yields the same emitter again to be
        // re-invoked; returning null terminates the stream.
        Box<Emitter<T>> box = new Box<>();
        return (box.a = action -> producer.test(action) ? box.a : null).stream();
    }
}
public class ActionMarshaller {

    /**
     * Marshalls the given Action onto the protocol marshaller, binding each target
     * configuration (DynamoDB, Lambda, SNS, ...) in a fixed order.
     *
     * @throws SdkClientException if the action is null or marshalling fails
     */
    public void marshall(Action action, ProtocolMarshaller protocolMarshaller) {
        if (action == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // One binding per possible action target; null fields are handled by the
            // protocol marshaller itself.
            protocolMarshaller.marshall(action.getDynamoDB(), DYNAMODB_BINDING);
            protocolMarshaller.marshall(action.getDynamoDBv2(), DYNAMODBV2_BINDING);
            protocolMarshaller.marshall(action.getLambda(), LAMBDA_BINDING);
            protocolMarshaller.marshall(action.getSns(), SNS_BINDING);
            protocolMarshaller.marshall(action.getSqs(), SQS_BINDING);
            protocolMarshaller.marshall(action.getKinesis(), KINESIS_BINDING);
            protocolMarshaller.marshall(action.getRepublish(), REPUBLISH_BINDING);
            protocolMarshaller.marshall(action.getS3(), S3_BINDING);
            protocolMarshaller.marshall(action.getFirehose(), FIREHOSE_BINDING);
            protocolMarshaller.marshall(action.getCloudwatchMetric(), CLOUDWATCHMETRIC_BINDING);
            protocolMarshaller.marshall(action.getCloudwatchAlarm(), CLOUDWATCHALARM_BINDING);
            protocolMarshaller.marshall(action.getElasticsearch(), ELASTICSEARCH_BINDING);
            protocolMarshaller.marshall(action.getSalesforce(), SALESFORCE_BINDING);
            protocolMarshaller.marshall(action.getIotAnalytics(), IOTANALYTICS_BINDING);
            protocolMarshaller.marshall(action.getIotEvents(), IOTEVENTS_BINDING);
            protocolMarshaller.marshall(action.getStepFunctions(), STEPFUNCTIONS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class RasterImage { /** * Create raster from data or load from cache .
* @ param rasterMedia The raster media .
* @ param raster The raster data .
* @ param i The raster index .
* @ param save < code > true < / code > to save generated raster , < code > false < / code > else .
* @ return The created raster . */
private ImageBuffer createRaster ( Media rasterMedia , Raster raster , int i , boolean save ) { } } | final ImageBuffer rasterBuffer ; if ( rasterMedia . exists ( ) ) { rasterBuffer = Graphics . getImageBuffer ( rasterMedia ) ; rasterBuffer . prepare ( ) ; } else { final double fr = getRasterFactor ( i , raster . getRed ( ) ) ; final double fg = getRasterFactor ( i , raster . getGreen ( ) ) ; final double fb = getRasterFactor ( i , raster . getBlue ( ) ) ; rasterBuffer = Graphics . getRasterBuffer ( surface , fr , fg , fb ) ; if ( save ) { Graphics . saveImage ( rasterBuffer , rasterMedia ) ; } } return rasterBuffer ; |
public class StringSupport {

    /**
     * Replaces occurrences of {@code needle} in {@code haystack} with
     * {@code newNeedle}, in place. An occurrence is only replaced once at least
     * {@code interval} characters have been scanned since the previous
     * replacement; the default interval of 0 means every occurrence is replaced.
     *
     * @param haystack buffer that is modified in place
     * @param needle the substring to search for; must be non-null and non-empty
     * @param newNeedle the replacement text
     * @param interval minimum scanned-chunk size between replacements; 0 replaces all
     * @throws IllegalArgumentException if {@code needle} is null or empty
     */
    public static void replaceAll(StringBuffer haystack, String needle, String newNeedle, int interval) {
        // An empty needle matches at every position and would loop forever; the
        // original only checked for null despite its error message.
        if (needle == null || needle.isEmpty()) {
            throw new IllegalArgumentException("string to replace can not be empty");
        }
        int idx = haystack.indexOf(needle);
        int nextIdx = -1;
        int processedChunkSize = idx;
        int needleLength = needle.length();
        int newNeedleLength = newNeedle.length();
        while (idx != -1) {
            if (processedChunkSize >= interval) {
                haystack.replace(idx, idx + needleLength, newNeedle);
                nextIdx = haystack.indexOf(needle, idx + newNeedleLength);
                processedChunkSize = nextIdx - idx; // length of replacement is not included
                idx = nextIdx;
            } else {
                // Below the interval: skip this occurrence and accumulate scanned size.
                nextIdx = haystack.indexOf(needle, idx + newNeedleLength);
                processedChunkSize += nextIdx - idx;
                idx = nextIdx;
                if (newNeedleLength == 0) {
                    return;
                }
            }
        }
    }
}
public class LicenseFrame { /** * This method initializes txtLicense
* @ return javax . swing . JTextPane */
private JTextPane getTxtLicense ( ) { } } | if ( txtLicense == null ) { txtLicense = new JTextPane ( ) ; txtLicense . setName ( "txtLicense" ) ; txtLicense . setEditable ( false ) ; } return txtLicense ; |
public class ByteBufMono {

    /**
     * Converts to an {@link InputStream} inbound {@link Mono}.
     *
     * @return a {@link InputStream} inbound {@link Mono}
     */
    public Mono<InputStream> asInputStream() {
        return handle((bb, sink) -> {
            try {
                // The stream takes ownership of the buffer and releases it on close.
                sink.next(new ReleasingInputStream(bb));
            }
            catch (IllegalReferenceCountException e) {
                // Buffer was already released upstream: complete empty instead of erroring.
                sink.complete();
            }
        });
    }
}
public class SignInApi {

    /**
     * HuaweiApiClient connect result callback.
     *
     * @param rst result code of the connect attempt
     * @param client HuaweiApiClient instance
     */
    @Override
    public void onConnect(int rst, HuaweiApiClient client) {
        // Bail out early when the client is missing or not actually connected.
        if (client == null || !ApiClientMgr.INST.isConnect(client)) {
            HMSAgentLog.e("client not connted");
            onSignInResult(rst, null);
            return;
        }
        // Sign-in requires a foreground activity to host the HMS sign-in UI.
        Activity curActivity = ActivityMgr.INST.getLastActivity();
        if (curActivity == null) {
            HMSAgentLog.e("activity is null");
            onSignInResult(HMSAgent.AgentResultCode.NO_ACTIVITY_FOR_USE, null);
            return;
        }
        // Kick off the asynchronous sign-in; the result is handled in the callback.
        PendingResult<SignInResult> signInResult = HuaweiId.HuaweiIdApi.signIn(curActivity, client);
        signInResult.setResultCallback(new ResultCallback<SignInResult>() {
            @Override
            public void onResult(SignInResult result) {
                disposeSignInResult(result);
            }
        });
    }
}
public class JShell { /** * Returns the active variable snippets .
* This convenience method is equivalent to { @ code snippets ( ) } filtered for
* { @ link jdk . jshell . Snippet . Status # isActive ( ) status ( snippet ) . isActive ( ) }
* { @ code & & snippet . kind ( ) = = Kind . VARIABLE }
* and cast to { @ code VarSnippet } .
* @ return the active declared variables . */
public Stream < VarSnippet > variables ( ) { } } | return snippets ( ) . filter ( sn -> status ( sn ) . isActive ( ) && sn . kind ( ) == Snippet . Kind . VAR ) . map ( sn -> ( VarSnippet ) sn ) ; |
public class EJBSecurityCollaboratorImpl {

    /**
     * (non-Javadoc)
     *
     * @see com.ibm.ws.container.service.metadata.ComponentMetaDataListener#componentMetaDataCreated(com.ibm.ws.container.service.metadata.MetaDataEvent)
     */
    @Override
    public void componentMetaDataCreated(MetaDataEvent<ComponentMetaData> event) {
        // The JACC service is optional; do nothing when it is not registered.
        JaccService js = jaccService.getService();
        if (js != null) {
            MetaData metaData = event.getMetaData();
            // Only EJB components carry role metadata worth propagating.
            if (metaData instanceof BeanMetaData) {
                BeanMetaData bmd = (BeanMetaData) metaData;
                js.propagateEJBRoles(bmd.j2eeName.getApplication(), bmd.j2eeName.getModule(), bmd.enterpriseBeanName, bmd.ivRoleLinkMap, JaccUtil.convertMethodInfoList(JaccUtil.mergeMethodInfos(bmd)));
            }
        }
    }
}
public class ConditionalBuilder { /** * Adds an { @ code else } clause encapsulating the given chunk to this conditional . */
public ConditionalBuilder setElse ( Statement trailingElse ) { } } | Preconditions . checkState ( this . trailingElse == null ) ; this . trailingElse = trailingElse ; return this ; |
public class DatasourceTemplate { /** * Return a list of objects obtained applying the given { @ link RowMapper } to each record .
* @ returnA not null empty list when the result set is empty .
* @ throws SQLException */
public < T > List < T > query ( String sql , RowMapper < T > rowMapper ) throws SQLException { } } | if ( logger . isDebugEnabled ( ) ) { logger . debug ( "Executing SQL statement [" + sql + "]" ) ; } Connection connection = null ; Statement stm = null ; ResultSet rs = null ; ArrayList < T > result = new ArrayList < T > ( ) ; try { connection = ds . getConnection ( ) ; stm = connection . createStatement ( ) ; rs = stm . executeQuery ( sql ) ; while ( rs . next ( ) ) { result . add ( rowMapper . map ( rs ) ) ; } } catch ( SQLException e ) { throw e ; } finally { if ( rs != null ) rs . close ( ) ; if ( stm != null ) stm . close ( ) ; if ( connection != null ) connection . close ( ) ; } return result ; |
public class AwsSecurityFindingFilters { /** * The key name associated with the instance .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setResourceAwsEc2InstanceKeyName ( java . util . Collection ) } or
* { @ link # withResourceAwsEc2InstanceKeyName ( java . util . Collection ) } if you want to override the existing values .
* @ param resourceAwsEc2InstanceKeyName
* The key name associated with the instance .
* @ return Returns a reference to this object so that method calls can be chained together . */
public AwsSecurityFindingFilters withResourceAwsEc2InstanceKeyName ( StringFilter ... resourceAwsEc2InstanceKeyName ) { } } | if ( this . resourceAwsEc2InstanceKeyName == null ) { setResourceAwsEc2InstanceKeyName ( new java . util . ArrayList < StringFilter > ( resourceAwsEc2InstanceKeyName . length ) ) ; } for ( StringFilter ele : resourceAwsEc2InstanceKeyName ) { this . resourceAwsEc2InstanceKeyName . add ( ele ) ; } return this ; |
public class ModelsImpl { /** * Update an entity role for a given entity .
* @ param appId The application ID .
* @ param versionId The version ID .
* @ param entityId The entity ID .
* @ param roleId The entity role ID .
* @ param updateCustomPrebuiltEntityRoleOptionalParameter the object representing the optional parameters to be set before calling this API
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the OperationStatus object */
public Observable < ServiceResponse < OperationStatus > > updateCustomPrebuiltEntityRoleWithServiceResponseAsync ( UUID appId , String versionId , UUID entityId , UUID roleId , UpdateCustomPrebuiltEntityRoleOptionalParameter updateCustomPrebuiltEntityRoleOptionalParameter ) { } } | if ( this . client . endpoint ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.endpoint() is required and cannot be null." ) ; } if ( appId == null ) { throw new IllegalArgumentException ( "Parameter appId is required and cannot be null." ) ; } if ( versionId == null ) { throw new IllegalArgumentException ( "Parameter versionId is required and cannot be null." ) ; } if ( entityId == null ) { throw new IllegalArgumentException ( "Parameter entityId is required and cannot be null." ) ; } if ( roleId == null ) { throw new IllegalArgumentException ( "Parameter roleId is required and cannot be null." ) ; } final String name = updateCustomPrebuiltEntityRoleOptionalParameter != null ? updateCustomPrebuiltEntityRoleOptionalParameter . name ( ) : null ; return updateCustomPrebuiltEntityRoleWithServiceResponseAsync ( appId , versionId , entityId , roleId , name ) ; |
public class RuleTemplateModelXMLPersistenceImpl { /** * ( non - Javadoc )
* @ see
* org . drools . ide . common . server . util . RuleModelPersistence # toModel ( java . lang . String */
@ Override public TemplateModel unmarshal ( final String xml ) { } } | if ( xml == null || xml . trim ( ) . length ( ) == 0 ) { return createEmptyModel ( ) ; } TemplateModel model = ( TemplateModel ) this . xt . fromXML ( xml ) ; // Upgrade model changes to legacy artifacts
ruleModelUpgrader1 . upgrade ( model ) ; ruleModelUpgrader2 . upgrade ( model ) ; ruleModelUpgrader3 . upgrade ( model ) ; ruleTemplateModelUpgrader1 . upgrade ( model ) ; model . putInSync ( ) ; return model ; |
public class BTree {
    /**
     * Binary search over the slice {@code a[fromIndex, toIndex)} using the given
     * comparator (which may wrap support for +/- infinity sentinel values).
     *
     * @param comparator the ordering to search with
     * @param key        the key to locate
     * @param a          the array, sorted in {@code comparator} order on the slice
     * @param fromIndex  first index of the search range (inclusive)
     * @param toIndex    last index of the search range (exclusive)
     * @return the index of the key if found; otherwise {@code -(insertionPoint + 1)}
     */
    @SuppressWarnings("unchecked")
    static <V> int find(Comparator<V> comparator, Object key, Object[] a, final int fromIndex, final int toIndex) {
        int low = fromIndex;
        int high = toIndex - 1;
        while (low <= high) {
            // Unsigned shift computes the midpoint without the int overflow that
            // (low + high) / 2 suffers for large indices.
            int mid = (low + high) >>> 1;
            int cmp = comparator.compare((V) key, (V) a[mid]);
            if (cmp > 0) {
                low = mid + 1;
            } else if (cmp < 0) {
                high = mid - 1;
            } else {
                return mid; // key found
            }
        }
        return -(low + 1); // key not found
    }
}
public class GrpcServerBuilder { /** * Add a service to this server .
* @ param serviceType the type of service
* @ param serviceDefinition the service definition of new service
* @ return an updated instance of this { @ link GrpcServerBuilder } */
public GrpcServerBuilder addService ( ServiceType serviceType , GrpcService serviceDefinition ) { } } | mServices . add ( serviceType ) ; return addService ( serviceDefinition ) ; |
public class V2ExtraJaxbClassModel { /** * { @ inheritDoc } */
@ Override public Class < ? > getClazz ( ClassLoader loader ) { } } | String c = getModelValue ( ) ; return c != null ? Classes . forName ( c , loader ) : null ; |
public class CommandRunnable { /** * { @ inheritDoc } */
@ Override protected void runInto ( ) throws JRebirthThreadException { } } | try { // Call the innerRun available with package visibility
this . command . innerRun ( this . wave ) ; } catch ( final CommandException ce ) { // Log all exception thrown during the execution of this command
LOGGER . error ( CMD_EXCEPTION , ce ) ; // Then update the wave status in order to perform right task after this failure
this . wave . status ( Wave . Status . Failed ) ; } catch ( final RuntimeException re ) { // Log any unplanned error occurred during the execution of this command
LOGGER . error ( UNPLANNED_FAILURE , re ) ; // Then update the wave status in order to perform right task after this failure
this . wave . status ( Wave . Status . Failed ) ; } |
public class ClassFile {
    /**
     * Resolves a constant from the constant pool to the lowest form it can be
     * represented by, e.g. String, Integer, Float, etc. Reference constants
     * (class / field / method / interface-method / name-and-type) are resolved
     * recursively and rendered as space-separated fragments.
     *
     * @author gcmurphy
     * @param index index of the constant within the pool
     * @param cp    the constant pool to resolve against
     * @return the string form of the constant, or {@code ""} when the index does
     *         not resolve or the constant tag is not handled below
     */
    public static String constantValue(int index, ConstantPool cp) {
        Constant type = cp.getConstant(index);
        if (type != null) {
            switch (type.getTag()) {
                case Constants.CONSTANT_Class:
                    // Class constant: resolve its name entry recursively.
                    ConstantClass cls = (ConstantClass) type;
                    return constantValue(cls.getNameIndex(), cp);
                case Constants.CONSTANT_Double:
                    ConstantDouble dbl = (ConstantDouble) type;
                    return String.valueOf(dbl.getBytes());
                case Constants.CONSTANT_Fieldref:
                    // "<class> <name> <signature>" via recursive resolution.
                    ConstantFieldref fieldRef = (ConstantFieldref) type;
                    return constantValue(fieldRef.getClassIndex(), cp) + " " + constantValue(fieldRef.getNameAndTypeIndex(), cp);
                case Constants.CONSTANT_Float:
                    ConstantFloat flt = (ConstantFloat) type;
                    return String.valueOf(flt.getBytes());
                case Constants.CONSTANT_Integer:
                    ConstantInteger integer = (ConstantInteger) type;
                    return String.valueOf(integer.getBytes());
                case Constants.CONSTANT_InterfaceMethodref:
                    ConstantInterfaceMethodref intRef = (ConstantInterfaceMethodref) type;
                    return constantValue(intRef.getClassIndex(), cp) + " " + constantValue(intRef.getNameAndTypeIndex(), cp);
                case Constants.CONSTANT_Long:
                    ConstantLong lng = (ConstantLong) type;
                    return String.valueOf(lng.getBytes());
                case Constants.CONSTANT_Methodref:
                    ConstantMethodref methRef = (ConstantMethodref) type;
                    return constantValue(methRef.getClassIndex(), cp) + " " + constantValue(methRef.getNameAndTypeIndex(), cp);
                case Constants.CONSTANT_NameAndType:
                    ConstantNameAndType nameType = (ConstantNameAndType) type;
                    return nameType.getName(cp) + " " + nameType.getSignature(cp);
                case Constants.CONSTANT_String:
                    ConstantString str = (ConstantString) type;
                    return str.getBytes(cp);
                case Constants.CONSTANT_Utf8:
                    ConstantUtf8 utf8 = (ConstantUtf8) type;
                    return utf8.getBytes();
            }
        }
        // Unknown tag or unresolvable index: fall back to an empty string.
        return "";
    }
}
public class SelectItemsIterator { /** * < p > Return < code > true < / code > if the iteration has more elements . < / p > */
public boolean hasNext ( ) { } } | if ( items != null ) { if ( items . hasNext ( ) ) { return ( true ) ; } else { items = null ; } } Object next = findNextValidChild ( ) ; while ( next != null ) { initializeItems ( next ) ; if ( items != null ) { return true ; } else { next = findNextValidChild ( ) ; } } return false ; |
public class ClassTransformer { /** * Slim AnnotatedTypes */
public < T > BackedAnnotatedType < T > getBackedAnnotatedType ( final Class < T > rawType , final Type baseType , final String bdaId , final String suffix ) { } } | try { return backedAnnotatedTypes . getCastValue ( new TypeHolder < T > ( rawType , baseType , bdaId , suffix ) ) ; } catch ( RuntimeException e ) { if ( e instanceof TypeNotPresentException || e instanceof ResourceLoadingException ) { BootstrapLogger . LOG . exceptionWhileLoadingClass ( rawType . getName ( ) , e ) ; throw new ResourceLoadingException ( "Exception while loading class " + rawType . getName ( ) , e ) ; } throw e ; } catch ( Error e ) { if ( e instanceof NoClassDefFoundError || e instanceof LinkageError ) { throw new ResourceLoadingException ( "Error while loading class " + rawType . getName ( ) , e ) ; } BootstrapLogger . LOG . errorWhileLoadingClass ( rawType . getName ( ) , e ) ; throw e ; } |
public class EventTypeMapper { /** * Map originator type .
* @ param originatorType the originator type
* @ return the originator */
private static Originator mapOriginatorType ( OriginatorType originatorType ) { } } | Originator originator = new Originator ( ) ; if ( originatorType != null ) { originator . setCustomId ( originatorType . getCustomId ( ) ) ; originator . setHostname ( originatorType . getHostname ( ) ) ; originator . setIp ( originatorType . getIp ( ) ) ; originator . setProcessId ( originatorType . getProcessId ( ) ) ; originator . setPrincipal ( originatorType . getPrincipal ( ) ) ; } return originator ; |
public class PrcTradingSettingsSave { /** * < p > Process entity request . < / p >
* @ param pAddParam additional param , e . g . return this line ' s
* document in " nextEntity " for farther process
* @ param pRequestData Request Data
* @ param pEntity Entity to process
* @ return Entity processed for farther process or null
* @ throws Exception - an exception */
@ Override public final TradingSettings process ( final Map < String , Object > pAddParam , final TradingSettings pEntity , final IRequestData pRequestData ) throws Exception { } } | srvTradingSettings . saveTradingSettings ( pAddParam , pEntity ) ; return pEntity ; |
public class aaagroup_tmsessionpolicy_binding { /** * Use this API to fetch aaagroup _ tmsessionpolicy _ binding resources of given name . */
public static aaagroup_tmsessionpolicy_binding [ ] get ( nitro_service service , String groupname ) throws Exception { } } | aaagroup_tmsessionpolicy_binding obj = new aaagroup_tmsessionpolicy_binding ( ) ; obj . set_groupname ( groupname ) ; aaagroup_tmsessionpolicy_binding response [ ] = ( aaagroup_tmsessionpolicy_binding [ ] ) obj . get_resources ( service ) ; return response ; |
public class UserApi { /** * Changes custom attribute for the given user
* @ param userIdOrUsername the user in the form of an Integer ( ID ) , String ( username ) , or User instance
* @ param key for the customAttribute
* @ param value for the customAttribute
* @ return changedCustomAttribute
* @ throws GitLabApiException on failure while changing customAttributes */
public CustomAttribute changeCustomAttribute ( final Object userIdOrUsername , final String key , final String value ) throws GitLabApiException { } } | return createCustomAttribute ( userIdOrUsername , key , value ) ; |
public class PendingInvitation { /** * Gets the manager value for this PendingInvitation .
* @ return manager */
public com . google . api . ads . adwords . axis . v201809 . mcm . ManagedCustomer getManager ( ) { } } | return manager ; |
public class InternalSARLParser {
    /**
     * ANTLR-generated parser method for the {@code XVariableDeclaration} rule
     * (InternalSARL.g:7962): an optional modifier group ({@code ('var'|'val')
     * 'extension'?} or {@code 'extension' ('var'|'val')}), a mandatory name
     * ({@code ValidID}), an optional {@code ':' JvmTypeReference} type and an
     * optional {@code '=' XExpression} initializer.
     *
     * <p>NOTE(review): generated code — change the grammar and regenerate rather
     * than editing this method by hand. The machine-generated grammar-trace
     * comments have been condensed to their InternalSARL.g positions.</p>
     *
     * @return the parsed model element (possibly partial while backtracking)
     * @throws RecognitionException if the input does not match the rule
     */
    public final EObject ruleXVariableDeclaration() throws RecognitionException {
        EObject current = null;
        Token lv_writeable_1_0 = null;
        Token otherlv_2 = null;
        Token lv_extension_3_0 = null;
        Token lv_extension_4_0 = null;
        Token lv_writeable_5_0 = null;
        Token otherlv_6 = null;
        Token otherlv_8 = null;
        Token otherlv_10 = null;
        AntlrDatatypeRuleToken lv_name_7_0 = null;
        EObject lv_type_9_0 = null;
        EObject lv_right_11_0 = null;
        enterRule();
        try {
            // InternalSARL.g:7968-7971 (grammar trace condensed)
            {
            {
            {
            // InternalSARL.g:8011-8013: () — zero-width action creating the element
            {
            {
            {
            if (state.backtracking == 0) {
                current = forceCreateModelElement(grammarAccess.getXVariableDeclarationAccess().getXtendVariableDeclarationAction_0_0_0(), current);
            }
            }
            // InternalSARL.g:8019:5: modifier alternatives
            int alt222 = 2;
            int LA222_0 = input.LA(1);
            if (((LA222_0 >= 65 && LA222_0 <= 66))) {
                alt222 = 1;
            } else if ((LA222_0 == 45)) {
                alt222 = 2;
            } else {
                if (state.backtracking > 0) {
                    state.failed = true;
                    return current;
                }
                NoViableAltException nvae = new NoViableAltException("", 222, 0, input);
                throw nvae;
            }
            switch (alt222) {
                case 1:
                    // InternalSARL.g:8020-8021: ('var'|'val') 'extension'?
                    {
                    {
                    // InternalSARL.g:8021:7: ('var'|'val')
                    int alt219 = 2;
                    int LA219_0 = input.LA(1);
                    if ((LA219_0 == 65)) {
                        alt219 = 1;
                    } else if ((LA219_0 == 66)) {
                        alt219 = 2;
                    } else {
                        if (state.backtracking > 0) {
                            state.failed = true;
                            return current;
                        }
                        NoViableAltException nvae = new NoViableAltException("", 219, 0, input);
                        throw nvae;
                    }
                    switch (alt219) {
                        case 1:
                            // InternalSARL.g:8022-8024: lv_writeable_1_0 = 'var'
                            {
                            {
                            {
                            lv_writeable_1_0 = (Token) match(input, 65, FOLLOW_75);
                            if (state.failed) return current;
                            if (state.backtracking == 0) {
                                newLeafNode(lv_writeable_1_0, grammarAccess.getXVariableDeclarationAccess().getWriteableVarKeyword_0_0_1_0_0_0_0());
                            }
                            if (state.backtracking == 0) {
                                if (current == null) {
                                    current = createModelElement(grammarAccess.getXVariableDeclarationRule());
                                }
                                setWithLastConsumed(current, "writeable", true, "var");
                            }
                            }
                            }
                            }
                            break;
                        case 2:
                            // InternalSARL.g:8037:8: otherlv_2 = 'val'
                            {
                            otherlv_2 = (Token) match(input, 66, FOLLOW_75);
                            if (state.failed) return current;
                            if (state.backtracking == 0) {
                                newLeafNode(otherlv_2, grammarAccess.getXVariableDeclarationAccess().getValKeyword_0_0_1_0_0_1());
                            }
                            }
                            break;
                    }
                    // InternalSARL.g:8042:7: ('extension')?
                    int alt220 = 2;
                    int LA220_0 = input.LA(1);
                    if ((LA220_0 == 45)) {
                        alt220 = 1;
                    }
                    switch (alt220) {
                        case 1:
                            // InternalSARL.g:8043-8044: lv_extension_3_0 = 'extension'
                            {
                            {
                            lv_extension_3_0 = (Token) match(input, 45, FOLLOW_3);
                            if (state.failed) return current;
                            if (state.backtracking == 0) {
                                newLeafNode(lv_extension_3_0, grammarAccess.getXVariableDeclarationAccess().getExtensionExtensionKeyword_0_0_1_0_1_0());
                            }
                            if (state.backtracking == 0) {
                                if (current == null) {
                                    current = createModelElement(grammarAccess.getXVariableDeclarationRule());
                                }
                                setWithLastConsumed(current, "extension", true, "extension");
                            }
                            }
                            }
                            break;
                    }
                    }
                    }
                    break;
                case 2:
                    // InternalSARL.g:8058-8061: 'extension' ('var'|'val')
                    {
                    {
                    {
                    {
                    lv_extension_4_0 = (Token) match(input, 45, FOLLOW_76);
                    if (state.failed) return current;
                    if (state.backtracking == 0) {
                        newLeafNode(lv_extension_4_0, grammarAccess.getXVariableDeclarationAccess().getExtensionExtensionKeyword_0_0_1_1_0_0());
                    }
                    if (state.backtracking == 0) {
                        if (current == null) {
                            current = createModelElement(grammarAccess.getXVariableDeclarationRule());
                        }
                        setWithLastConsumed(current, "extension", true, "extension");
                    }
                    }
                    }
                    // InternalSARL.g:8073:7: ('var'|'val')
                    int alt221 = 2;
                    int LA221_0 = input.LA(1);
                    if ((LA221_0 == 65)) {
                        alt221 = 1;
                    } else if ((LA221_0 == 66)) {
                        alt221 = 2;
                    } else {
                        if (state.backtracking > 0) {
                            state.failed = true;
                            return current;
                        }
                        NoViableAltException nvae = new NoViableAltException("", 221, 0, input);
                        throw nvae;
                    }
                    switch (alt221) {
                        case 1:
                            // InternalSARL.g:8074-8076: lv_writeable_5_0 = 'var'
                            {
                            {
                            {
                            lv_writeable_5_0 = (Token) match(input, 65, FOLLOW_3);
                            if (state.failed) return current;
                            if (state.backtracking == 0) {
                                newLeafNode(lv_writeable_5_0, grammarAccess.getXVariableDeclarationAccess().getWriteableVarKeyword_0_0_1_1_1_0_0());
                            }
                            if (state.backtracking == 0) {
                                if (current == null) {
                                    current = createModelElement(grammarAccess.getXVariableDeclarationRule());
                                }
                                setWithLastConsumed(current, "writeable", true, "var");
                            }
                            }
                            }
                            }
                            break;
                        case 2:
                            // InternalSARL.g:8089:8: otherlv_6 = 'val'
                            {
                            otherlv_6 = (Token) match(input, 66, FOLLOW_3);
                            if (state.failed) return current;
                            if (state.backtracking == 0) {
                                newLeafNode(otherlv_6, grammarAccess.getXVariableDeclarationAccess().getValKeyword_0_0_1_1_1_1());
                            }
                            }
                            break;
                    }
                    }
                    }
                    break;
            }
            }
            }
            // InternalSARL.g:8098-8100: lv_name_7_0 = ruleValidID
            {
            {
            if (state.backtracking == 0) {
                newCompositeNode(grammarAccess.getXVariableDeclarationAccess().getNameValidIDParserRuleCall_1_0());
            }
            pushFollow(FOLLOW_77);
            lv_name_7_0 = ruleValidID();
            state._fsp--;
            if (state.failed) return current;
            if (state.backtracking == 0) {
                if (current == null) {
                    current = createModelElementForParent(grammarAccess.getXVariableDeclarationRule());
                }
                set(current, "name", lv_name_7_0, "org.eclipse.xtend.core.Xtend.ValidID");
                afterParserOrEnumRuleCall();
            }
            }
            }
            // InternalSARL.g:8117:3: (':' JvmTypeReference)?
            int alt223 = 2;
            int LA223_0 = input.LA(1);
            if ((LA223_0 == 46)) {
                alt223 = 1;
            }
            switch (alt223) {
                case 1:
                    // InternalSARL.g:8118:4: otherlv_8 = ':' type
                    {
                    otherlv_8 = (Token) match(input, 46, FOLLOW_41);
                    if (state.failed) return current;
                    if (state.backtracking == 0) {
                        newLeafNode(otherlv_8, grammarAccess.getXVariableDeclarationAccess().getColonKeyword_2_0());
                    }
                    // InternalSARL.g:8122-8124: lv_type_9_0 = ruleJvmTypeReference
                    {
                    {
                    if (state.backtracking == 0) {
                        newCompositeNode(grammarAccess.getXVariableDeclarationAccess().getTypeJvmTypeReferenceParserRuleCall_2_1_0());
                    }
                    pushFollow(FOLLOW_78);
                    lv_type_9_0 = ruleJvmTypeReference();
                    state._fsp--;
                    if (state.failed) return current;
                    if (state.backtracking == 0) {
                        if (current == null) {
                            current = createModelElementForParent(grammarAccess.getXVariableDeclarationRule());
                        }
                        set(current, "type", lv_type_9_0, "org.eclipse.xtext.xbase.Xtype.JvmTypeReference");
                        afterParserOrEnumRuleCall();
                    }
                    }
                    }
                    }
                    break;
            }
            // InternalSARL.g:8142:3: ('=' XExpression)?
            int alt224 = 2;
            int LA224_0 = input.LA(1);
            if ((LA224_0 == 47)) {
                alt224 = 1;
            }
            switch (alt224) {
                case 1:
                    // InternalSARL.g:8143:4: otherlv_10 = '=' right
                    {
                    otherlv_10 = (Token) match(input, 47, FOLLOW_45);
                    if (state.failed) return current;
                    if (state.backtracking == 0) {
                        newLeafNode(otherlv_10, grammarAccess.getXVariableDeclarationAccess().getEqualsSignKeyword_3_0());
                    }
                    // InternalSARL.g:8147-8149: lv_right_11_0 = ruleXExpression
                    {
                    {
                    if (state.backtracking == 0) {
                        newCompositeNode(grammarAccess.getXVariableDeclarationAccess().getRightXExpressionParserRuleCall_3_1_0());
                    }
                    pushFollow(FOLLOW_2);
                    lv_right_11_0 = ruleXExpression();
                    state._fsp--;
                    if (state.failed) return current;
                    if (state.backtracking == 0) {
                        if (current == null) {
                            current = createModelElementForParent(grammarAccess.getXVariableDeclarationRule());
                        }
                        set(current, "right", lv_right_11_0, "org.eclipse.xtext.xbase.Xbase.XExpression");
                        afterParserOrEnumRuleCall();
                    }
                    }
                    }
                    }
                    break;
            }
            }
            }
            if (state.backtracking == 0) {
                leaveRule();
            }
        } catch (RecognitionException re) {
            recover(input, re);
            appendSkippedTokens();
        } finally {
        }
        return current;
    }
}
public class TouchState { /** * Compare two non - null states whose points are sorted by ID */
boolean equalsSorted ( TouchState ts ) { } } | if ( ts . pointCount == pointCount && ts . primaryID == primaryID && ts . window == window ) { for ( int i = 0 ; i < pointCount ; i ++ ) { Point p1 = ts . points [ i ] ; Point p2 = points [ i ] ; if ( p1 . x != p2 . x || p1 . y != p2 . y || p1 . id != p2 . id ) { return false ; } } return true ; } else { return false ; } |
public class CmsGalleryService { /** * Returns the workplace settings of the current user . < p >
* @ return the workplace settings */
private CmsWorkplaceSettings getWorkplaceSettings ( ) { } } | if ( m_workplaceSettings == null ) { m_workplaceSettings = CmsWorkplace . getWorkplaceSettings ( getCmsObject ( ) , getRequest ( ) ) ; } return m_workplaceSettings ; |
public class ProtoBufToFlatBufConversion { /** * Converts a file containing a model from the Protocol Buffer format to the Flat
* Buffer format .
* @ param inFile input file ( . pb format )
* @ param outFile output file ( . fb format )
* @ throws IOException
* @ throws org . nd4j . linalg . exception . ND4JIllegalStateException */
public static void convert ( String inFile , String outFile ) throws IOException , org . nd4j . linalg . exception . ND4JIllegalStateException { } } | SameDiff tg = TFGraphMapper . getInstance ( ) . importGraph ( new File ( inFile ) ) ; tg . asFlatFile ( new File ( outFile ) ) ; |
public class JsJmsMapMessageImpl { /** * Set a float value with the given name , into the Map .
* Javadoc description supplied by JsJmsMessage interface . */
public void setFloat ( String name , float value ) throws UnsupportedEncodingException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "setFloat" , new Float ( value ) ) ; getBodyMap ( ) . put ( name , new Float ( value ) ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "setFloat" ) ; |
public class Intersection { /** * modifies the intersection by further intersecting it with a class / property
* IMPORTANT : the class / property is given by the DOWNWRD - SATURATED SET
* ( in other words , by the result of EquivalencesDAG . getSubRepresentatives
* @ param e a non - empty downward saturated set for class / property */
public void intersectWith ( T e ) { } } | if ( elements == null ) // we have top , the intersection is sub
elements = new HashSet < > ( dag . getSubRepresentatives ( e ) ) ; // copy the set
else elements . retainAll ( dag . getSubRepresentatives ( e ) ) ; |
public class IntTupleDistanceFunctions { /** * Computes the squared Euclidean distance between the given tuples
* when they are interpreted as points of a sphere with the specified
* size .
* @ param t0 The first array
* @ param t1 The second array
* @ param size The size of the sphere
* @ return The distance
* @ throws IllegalArgumentException If the given arrays do not
* have the same length */
static double computeWrappedEuclidean ( IntTuple t0 , IntTuple t1 , IntTuple size ) { } } | return Math . sqrt ( computeWrappedEuclideanSquared ( t0 , t1 , size ) ) ; |
public class pqpolicy_stats { /** * Use this API to fetch the statistics of all pqpolicy _ stats resources that are configured on netscaler . */
public static pqpolicy_stats [ ] get ( nitro_service service ) throws Exception { } } | pqpolicy_stats obj = new pqpolicy_stats ( ) ; pqpolicy_stats [ ] response = ( pqpolicy_stats [ ] ) obj . stat_resources ( service ) ; return response ; |
public class GooglePublicKeysManager {
    /**
     * Forces a refresh of the public certificates downloaded from
     * {@link #getPublicCertsEncodedUrl}.
     *
     * <p>This method is automatically called from {@link #getPublicKeys()} if the
     * public keys have not yet been initialized or if the expiration time is very
     * close, so normally this doesn't need to be called. Only call this method to
     * explicitly force the public keys to be updated.</p>
     *
     * @return this manager, for chaining
     * @throws GeneralSecurityException if a downloaded certificate cannot be parsed
     * @throws IOException if the HTTP request or JSON parsing fails
     */
    public GooglePublicKeysManager refresh() throws GeneralSecurityException, IOException {
        // Hold the lock for the whole refresh so readers never observe a
        // partially rebuilt key list / expiration pair.
        lock.lock();
        try {
            publicKeys = new ArrayList<PublicKey>();
            // HTTP request to public endpoint
            CertificateFactory factory = SecurityUtils.getX509CertificateFactory();
            HttpResponse certsResponse = transport.createRequestFactory().buildGetRequest(new GenericUrl(publicCertsEncodedUrl)).execute();
            // Expiration is derived from the response's cache headers.
            expirationTimeMilliseconds = clock.currentTimeMillis() + getCacheTimeInSec(certsResponse.getHeaders()) * 1000;
            // parse each public key in the JSON response
            JsonParser parser = jsonFactory.createJsonParser(certsResponse.getContent());
            JsonToken currentToken = parser.getCurrentToken();
            // token is null at start, so get next token
            if (currentToken == null) {
                currentToken = parser.nextToken();
            }
            Preconditions.checkArgument(currentToken == JsonToken.START_OBJECT);
            try {
                // Each object field is a key id -> PEM certificate pair; only the
                // certificate value is needed to extract the public key.
                while (parser.nextToken() != JsonToken.END_OBJECT) {
                    parser.nextToken();
                    String certValue = parser.getText();
                    X509Certificate x509Cert = (X509Certificate) factory.generateCertificate(new ByteArrayInputStream(StringUtils.getBytesUtf8(certValue)));
                    publicKeys.add(x509Cert.getPublicKey());
                }
                // Publish an unmodifiable snapshot once fully built.
                publicKeys = Collections.unmodifiableList(publicKeys);
            } finally {
                parser.close();
            }
            return this;
        } finally {
            lock.unlock();
        }
    }
}
public class LinkedHashMapPro { /** * Called by superclass constructors and pseudoconstructors ( clone , readObject ) before any entries
* are inserted into the map . Initializes the chain . */
@ Override void init ( ) { } } | header = new Entry < K , V > ( - 1 , null , null , null ) ; header . before = header . after = header ; |
public class ClassInfoList { /** * Filter this { @ link ClassInfoList } to include only { @ link Enum } classes .
* @ return The filtered list , containing only enums . */
public ClassInfoList getEnums ( ) { } } | return filter ( new ClassInfoFilter ( ) { @ Override public boolean accept ( final ClassInfo ci ) { return ci . isEnum ( ) ; } } ) ; |
public class MetricsCacheSink { /** * If so , restart the metricsCacheClientService with the new MetricsCacheLocation */
private void startMetricsCacheChecker ( ) { } } | final int checkIntervalSec = TypeUtils . getInteger ( sinkConfig . get ( KEY_TMASTER_LOCATION_CHECK_INTERVAL_SEC ) ) ; Runnable runnable = new Runnable ( ) { @ Override public void run ( ) { TopologyMaster . MetricsCacheLocation location = ( TopologyMaster . MetricsCacheLocation ) SingletonRegistry . INSTANCE . getSingleton ( MetricsManagerServer . METRICSCACHE_LOCATION_BEAN_NAME ) ; if ( location != null ) { if ( currentMetricsCacheLocation == null || ! location . equals ( currentMetricsCacheLocation ) ) { LOG . info ( "Update current MetricsCacheLocation to: " + location ) ; currentMetricsCacheLocation = location ; metricsCacheClientService . updateMetricsCacheLocation ( currentMetricsCacheLocation ) ; metricsCacheClientService . startNewMasterClient ( ) ; // Update Metrics
sinkContext . exportCountMetric ( METRICSMGR_LOCATION_UPDATE_COUNT , 1 ) ; } } // Schedule itself in future
tMasterLocationStarter . schedule ( this , checkIntervalSec , TimeUnit . SECONDS ) ; } } ; // First Entry
tMasterLocationStarter . schedule ( runnable , checkIntervalSec , TimeUnit . SECONDS ) ; LOG . info ( "MetricsCacheChecker started with interval: " + checkIntervalSec ) ; |
public class AlluxioURI {
  /**
   * Checks if the path contains a Windows drive specification (for example {@code C:} at the
   * expected position). This check is platform independent.
   *
   * @param path the path to check
   * @param slashed whether the path is expected to start with a slash
   * @return true if it is a windows path, false otherwise
   */
  public static boolean hasWindowsDrive(String path, boolean slashed) {
    // The drive letter sits at index 1 when the path is slash-prefixed, otherwise at index 0.
    int driveIndex = slashed ? 1 : 0;
    if (path.length() < driveIndex + 2) {
      return false;
    }
    if (slashed && path.charAt(0) != '/') {
      return false;
    }
    if (path.charAt(driveIndex + 1) != ':') {
      return false;
    }
    char drive = path.charAt(driveIndex);
    return (drive >= 'A' && drive <= 'Z') || (drive >= 'a' && drive <= 'z');
  }
}
public class SealStreamTask {
    /**
     * Issues abort requests for all outstanding transactions on the stream; the aborts are
     * processed asynchronously.
     *
     * <p>The returned future completes with {@code false} if any transactions were found (and
     * abort requests were issued for the OPEN ones), or {@code true} if there were none.
     *
     * @param context operation context
     * @param scope scope
     * @param stream stream
     * @param requestId requestId
     * @return CompletableFuture which when complete will contain a boolean indicating whether
     *         the stream had no active transactions
     */
    private CompletableFuture<Boolean> abortTransaction(OperationContext context, String scope,
                                                        String stream, long requestId) {
        return streamMetadataStore.getActiveTxns(scope, stream, context, executor)
                .thenCompose(activeTxns -> {
                    if (activeTxns == null || activeTxns.isEmpty()) {
                        // Nothing to abort: the stream has no active transactions.
                        return CompletableFuture.completedFuture(true);
                    } else {
                        // Abort every OPEN transaction; non-OPEN ones are already completing.
                        return Futures.allOf(activeTxns.entrySet().stream().map(txIdPair -> {
                            CompletableFuture<Void> voidCompletableFuture;
                            if (txIdPair.getValue().getTxnStatus().equals(TxnStatus.OPEN)) {
                                voidCompletableFuture = Futures.toVoid(
                                        streamTransactionMetadataTasks
                                                .abortTxn(scope, stream, txIdPair.getKey(), null, context)
                                                .exceptionally(e -> {
                                                    Throwable cause = Exceptions.unwrap(e);
                                                    if (cause instanceof StoreException.IllegalStateException
                                                            || cause instanceof StoreException.WriteConflictException
                                                            || cause instanceof StoreException.DataNotFoundException) {
                                                        // IllegalStateException: the transaction is already in the
                                                        // process of being completed. Ignore.
                                                        // WriteConflictException: another thread is updating the
                                                        // transaction record. Ignore; we will effectively retry
                                                        // cleaning up the transaction if it is not already being
                                                        // aborted.
                                                        // DataNotFoundException: transaction metadata was cleaned up
                                                        // after we read the list of active transactions.
                                                        log.debug(requestId, "A known exception thrown during seal stream "
                                                                + "while trying to abort transaction on stream {}/{}",
                                                                scope, stream, cause);
                                                    } else {
                                                        // Log (not rethrow) the original exception: if transactions
                                                        // remain on the stream, seal stream reposts the event back
                                                        // into the request stream, so a subsequent iteration will
                                                        // reattempt aborting all active transactions. This is valid
                                                        // because every transaction is completable (either via abort
                                                        // or commit).
                                                        log.warn(requestId, "Exception thrown during seal stream while trying "
                                                                + "to abort transaction on stream {}/{}",
                                                                scope, stream, cause);
                                                    }
                                                    return null;
                                                }));
                            } else {
                                // Transaction is not OPEN: nothing to do for it.
                                voidCompletableFuture = CompletableFuture.completedFuture(null);
                            }
                            return voidCompletableFuture;
                        }).collect(Collectors.toList()))
                        // Transactions existed, so report "not empty".
                        .thenApply(v -> false);
                    }
                });
    }
}
public class GenericsHelper { /** * same as class . isAssignableFrom ( class ) but with implicit cast check for native numbers */
public static boolean isAssignableFrom ( Class < ? > assignedCls , Class < ? > assigningCls ) { } } | if ( isNativeNumberType ( assigningCls ) && isNativeNumberType ( assignedCls ) ) { return isNumberImplicitlyCastableFrom ( assignedCls , assigningCls ) ; } return assignedCls . isAssignableFrom ( assigningCls ) ; |
public class FileSupport { /** * Retrieves all files from a directory and its subdirectories
* matching the given mask .
* @ param file directory
* @ param includeMask mask to match
* @ return a list containing the found files */
public static ArrayList < File > getFilesInDirectoryTree ( File file , String includeMask ) { } } | return getContentsInDirectoryTree ( file , includeMask , true , false ) ; |
public class BarcodeUtils { /** * Call this from your { @ link Activity } ' s { @ link Activity # onActivityResult ( int , int , Intent ) } method .
* @ param requestCode
* @ param resultCode
* @ param intent
* @ return null if the event handled here was not related to this class , or else an { @ link BarcodeIntentResult }
* containing the result of the scan . If the user cancelled scanning , the fields will be null . */
public static BarcodeIntentResult parseActivityResult ( int requestCode , int resultCode , Intent intent ) { } } | if ( requestCode == REQUEST_CODE && resultCode == Activity . RESULT_OK ) { String contents = intent . getStringExtra ( SCAN_RESULT ) ; String formatName = intent . getStringExtra ( SCAN_RESULT_FORMAT ) ; return new BarcodeIntentResult ( contents , formatName ) ; } return null ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.