signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class XWikiSyntaxChainingRenderer { /** * { @ inheritDoc }
* @ since 2.0RC1 */
@ Override public void beginDefinitionList ( Map < String , String > parameters ) { } }
|
if ( getBlockState ( ) . getDefinitionListDepth ( ) == 1 && ! getBlockState ( ) . isInList ( ) ) { printEmptyLine ( ) ; } else { print ( "\n" ) ; } printParameters ( parameters ) ;
|
public class RTMPConnection { /** * Mark message as being written .
* @ param message
* Message to mark */
protected void writingMessage ( Packet message ) { } }
|
if ( message . getMessage ( ) instanceof VideoData ) { Number streamId = message . getHeader ( ) . getStreamId ( ) ; final AtomicInteger value = new AtomicInteger ( ) ; AtomicInteger old = pendingVideos . putIfAbsent ( streamId . doubleValue ( ) , value ) ; if ( old == null ) { old = value ; } old . incrementAndGet ( ) ; }
|
public class ConnectionBase { /** * See { @ link # setState ( int ) } , with additional notification of internal threads .
* @ param newState new state to set */
protected final void setStateNotify ( final int newState ) { } }
|
synchronized ( lock ) { setState ( newState ) ; if ( newState == OK && ! inBlockingSend ) this . sendWaitQueue . release ( false ) ; // worst case : we notify 2 threads , the closing one and 1 sending
lock . notifyAll ( ) ; }
|
public class HessianInput { /** * Reads an object from the input stream with an expected type . */
public Object readObject ( Class cl ) throws IOException { } }
|
if ( cl == null || cl == Object . class ) return readObject ( ) ; int tag = read ( ) ; switch ( tag ) { case 'N' : return null ; case 'M' : { String type = readType ( ) ; // hessian / 3386
if ( "" . equals ( type ) ) { Deserializer reader ; reader = _serializerFactory . getDeserializer ( cl ) ; return reader . readMap ( this ) ; } else { Deserializer reader ; reader = _serializerFactory . getObjectDeserializer ( type ) ; return reader . readMap ( this ) ; } } case 'V' : { String type = readType ( ) ; int length = readLength ( ) ; Deserializer reader ; reader = _serializerFactory . getObjectDeserializer ( type ) ; if ( cl != reader . getType ( ) && cl . isAssignableFrom ( reader . getType ( ) ) ) return reader . readList ( this , length ) ; reader = _serializerFactory . getDeserializer ( cl ) ; Object v = reader . readList ( this , length ) ; return v ; } case 'R' : { int ref = parseInt ( ) ; return _refs . get ( ref ) ; } case 'r' : { String type = readType ( ) ; String url = readString ( ) ; return resolveRemote ( type , url ) ; } } _peek = tag ; // hessian / 332i vs hessian / 3406
// return readObject ( ) ;
Object value = _serializerFactory . getDeserializer ( cl ) . readObject ( this ) ; return value ;
|
public class ObjectPoolWithThreadAffinity { /** * Clears any objects sitting idle in the pool . */
public synchronized void clear ( ) { } }
|
pool . forEachUntil ( new DoUntilProcedure < E > ( ) { public boolean execute ( E object ) { destroyObject ( object ) ; return false ; } } ) ; pool . clear ( ) ; numActive = 0 ; notifyAll ( ) ; // num sleeping has changed
|
public class CommerceWishListItemPersistenceImpl { /** * Returns the first commerce wish list item in the ordered set where commerceWishListId = & # 63 ; .
* @ param commerceWishListId the commerce wish list ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching commerce wish list item
* @ throws NoSuchWishListItemException if a matching commerce wish list item could not be found */
@ Override public CommerceWishListItem findByCommerceWishListId_First ( long commerceWishListId , OrderByComparator < CommerceWishListItem > orderByComparator ) throws NoSuchWishListItemException { } }
|
CommerceWishListItem commerceWishListItem = fetchByCommerceWishListId_First ( commerceWishListId , orderByComparator ) ; if ( commerceWishListItem != null ) { return commerceWishListItem ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "commerceWishListId=" ) ; msg . append ( commerceWishListId ) ; msg . append ( "}" ) ; throw new NoSuchWishListItemException ( msg . toString ( ) ) ;
|
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getGCCBEZRG ( ) { } }
|
if ( gccbezrgEClass == null ) { gccbezrgEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 493 ) ; } return gccbezrgEClass ;
|
public class LongStream { /** * Returns { @ code LongStream } with elements that satisfy the given predicate .
* < p > This is an intermediate operation .
* < p > Example :
* < pre >
* predicate : ( a ) - & gt ; a & gt ; 2
* stream : [ 1 , 2 , 3 , 4 , - 8 , 0 , 11]
* result : [ 3 , 4 , 11]
* < / pre >
* @ param predicate the predicate used to filter elements
* @ return the new stream */
@ NotNull public LongStream filter ( @ NotNull final LongPredicate predicate ) { } }
|
return new LongStream ( params , new LongFilter ( iterator , predicate ) ) ;
|
public class Deleter { /** * Actually delete the resource . */
public void delete ( ) throws IOException , PlivoRestException { } }
|
validate ( ) ; Response < ResponseBody > response = obtainCall ( ) . execute ( ) ; handleResponse ( response ) ;
|
public class CLibrary { /** * Returns the limit on the number of open files or null
* on failure */
public static Integer getOpenFileLimit ( ) { } }
|
try { Rlimit rlimit = new Rlimit ( ) ; int retval = getrlimit ( System . getProperty ( "os.name" ) . equals ( "Linux" ) ? RLIMIT_NOFILE_LINUX : RLIMIT_NOFILE_MAC_OS_X , rlimit ) ; if ( retval != 0 ) { return null ; } else if ( rlimit . rlim_cur >= 1024 ) { // Seems to be a sensible value that is the default or greater
return ( int ) rlimit . rlim_cur ; } else { return null ; } } catch ( Exception e ) { hostLog . warn ( "Failed to retrieve open file limit via JNA" , e ) ; } return null ;
|
public class SMIDAS { /** * Sets the learning rate used during training
* @ param eta the learning rate to use */
public void setEta ( double eta ) { } }
|
if ( Double . isNaN ( eta ) || Double . isInfinite ( eta ) || eta <= 0 ) throw new ArithmeticException ( "convergence parameter must be a positive value" ) ; this . eta = eta ;
|
public class Matrix3x2f { /** * Apply a rotation transformation to this matrix by rotating the given amount of radians about
* the specified rotation center < code > ( x , y ) < / code > and store the result in < code > dest < / code > .
* This method is equivalent to calling : < code > translate ( x , y , dest ) . rotate ( ang ) . translate ( - x , - y ) < / code >
* If < code > M < / code > is < code > this < / code > matrix and < code > R < / code > the rotation matrix ,
* then the new matrix will be < code > M * R < / code > . So when transforming a
* vector < code > v < / code > with the new matrix by using < code > M * R * v < / code > , the rotation will be applied first !
* @ see # translate ( float , float , Matrix3x2f )
* @ see # rotate ( float , Matrix3x2f )
* @ param ang
* the angle in radians
* @ param x
* the x component of the rotation center
* @ param y
* the y component of the rotation center
* @ param dest
* will hold the result
* @ return dest */
public Matrix3x2f rotateAbout ( float ang , float x , float y , Matrix3x2f dest ) { } }
|
float tm20 = m00 * x + m10 * y + m20 ; float tm21 = m01 * x + m11 * y + m21 ; float cos = ( float ) Math . cos ( ang ) ; float sin = ( float ) Math . sin ( ang ) ; float nm00 = m00 * cos + m10 * sin ; float nm01 = m01 * cos + m11 * sin ; dest . m10 = m00 * - sin + m10 * cos ; dest . m11 = m01 * - sin + m11 * cos ; dest . m00 = nm00 ; dest . m01 = nm01 ; dest . m20 = dest . m00 * - x + dest . m10 * - y + tm20 ; dest . m21 = dest . m01 * - x + dest . m11 * - y + tm21 ; return dest ;
|
public class SARLJvmModelInferrer { /** * Copy the JVM operations from the source to the destination .
* @ param source the source .
* @ param target the destination .
* @ param createdActions the set of actions that are created before ( input ) or during ( output ) the invocation .
* @ param bodyBuilder the builder of the target ' s operations .
* @ since 0.5 */
@ SuppressWarnings ( "checkstyle:npathcomplexity" ) protected void copyNonStaticPublicJvmOperations ( JvmGenericType source , JvmGenericType target , Set < ActionPrototype > createdActions , Procedure2 < ? super JvmOperation , ? super ITreeAppendable > bodyBuilder ) { } }
|
final Iterable < JvmOperation > operations = Iterables . transform ( Iterables . filter ( source . getMembers ( ) , it -> { if ( it instanceof JvmOperation ) { final JvmOperation op = ( JvmOperation ) it ; return ! op . isStatic ( ) && op . getVisibility ( ) == JvmVisibility . PUBLIC ; } return false ; } ) , it -> ( JvmOperation ) it ) ; for ( final JvmOperation operation : operations ) { final ActionParameterTypes types = this . sarlSignatureProvider . createParameterTypesFromJvmModel ( operation . isVarArgs ( ) , operation . getParameters ( ) ) ; final ActionPrototype actSigKey = this . sarlSignatureProvider . createActionPrototype ( operation . getSimpleName ( ) , types ) ; if ( createdActions . add ( actSigKey ) ) { final JvmOperation newOp = this . typesFactory . createJvmOperation ( ) ; target . getMembers ( ) . add ( newOp ) ; newOp . setAbstract ( false ) ; newOp . setFinal ( false ) ; newOp . setNative ( false ) ; newOp . setStatic ( false ) ; newOp . setSynchronized ( false ) ; newOp . setVisibility ( JvmVisibility . PUBLIC ) ; newOp . setDefault ( operation . isDefault ( ) ) ; newOp . setDeprecated ( operation . isDeprecated ( ) ) ; newOp . setSimpleName ( operation . getSimpleName ( ) ) ; newOp . setStrictFloatingPoint ( operation . isStrictFloatingPoint ( ) ) ; copyTypeParametersFromJvmOperation ( operation , newOp ) ; for ( final JvmTypeReference exception : operation . getExceptions ( ) ) { newOp . getExceptions ( ) . add ( cloneWithTypeParametersAndProxies ( exception , newOp ) ) ; } for ( final JvmFormalParameter parameter : operation . getParameters ( ) ) { final JvmFormalParameter newParam = this . typesFactory . createJvmFormalParameter ( ) ; newOp . getParameters ( ) . add ( newParam ) ; newParam . setName ( parameter . getSimpleName ( ) ) ; newParam . setParameterType ( cloneWithTypeParametersAndProxies ( parameter . getParameterType ( ) , newOp ) ) ; } newOp . setVarArgs ( operation . isVarArgs ( ) ) ; newOp . 
setReturnType ( cloneWithTypeParametersAndProxies ( operation . getReturnType ( ) , newOp ) ) ; setBody ( newOp , it -> bodyBuilder . apply ( operation , it ) ) ; } }
|
public class BaggageResolver { /** * 通过请求透传数据
* @ param context RpcInvokeContext
* @ param request 请求 */
public static void carryWithRequest ( RpcInvokeContext context , SofaRequest request ) { } }
|
if ( context != null ) { Map < String , String > requestBaggage = context . getAllRequestBaggage ( ) ; if ( CommonUtils . isNotEmpty ( requestBaggage ) ) { // 需要透传
request . addRequestProp ( RemotingConstants . RPC_REQUEST_BAGGAGE , requestBaggage ) ; } }
|
public class ParallelStepBuilder { /** * Build a generated job with only one flow in it to submit to the
* BatchKernel . This is used to build subjobs from splits . */
public static JSLJob buildFlowInSplitSubJob ( long topLevelJobInstanceId , JobContext jobContext , Split split , Flow flow ) { } }
|
ObjectFactory jslFactory = new ObjectFactory ( ) ; JSLJob subJob = jslFactory . createJSLJob ( ) ; // Uses the true top - level job instance id , not an internal " subjob " id .
String subJobId = generateSubJobId ( topLevelJobInstanceId , split . getId ( ) , flow . getId ( ) ) ; subJob . setId ( subJobId ) ; // Copy all properties from parent JobContext to flow threads
subJob . setProperties ( CloneUtility . javaPropsTojslProperties ( jobContext . getProperties ( ) ) ) ; // We don ' t need to do a deep copy here since each flow is already independent of all others , unlike in a partition
// where one step instance can be executed with different properties on multiple threads .
subJob . getExecutionElements ( ) . add ( flow ) ; return subJob ;
|
public class HealthPinger { /** * Pings the service and completes if successful - and fails if it didn ' t work
* for some reason ( reason is in the exception ) . */
private static Observable < PingServiceHealth > pingBinary ( final NetworkAddress hostname , final String bucket , final ClusterFacade core , final long timeout , final TimeUnit timeUnit ) { } }
|
final AtomicReference < CouchbaseRequest > request = new AtomicReference < CouchbaseRequest > ( ) ; Observable < NoopResponse > response = Observable . defer ( new Func0 < Observable < NoopResponse > > ( ) { @ Override public Observable < NoopResponse > call ( ) { CouchbaseRequest r = new NoopRequest ( bucket , hostname ) ; request . set ( r ) ; return core . send ( r ) ; } } ) . timeout ( timeout , timeUnit ) ; return mapToServiceHealth ( bucket , ServiceType . BINARY , response , request , timeout , timeUnit ) ;
|
public class InternalXbaseParser { /** * InternalXbase . g : 1083:1 : entryRuleXExpressionOrVarDeclaration : ruleXExpressionOrVarDeclaration EOF ; */
public final void entryRuleXExpressionOrVarDeclaration ( ) throws RecognitionException { } }
|
try { // InternalXbase . g : 1084:1 : ( ruleXExpressionOrVarDeclaration EOF )
// InternalXbase . g : 1085:1 : ruleXExpressionOrVarDeclaration EOF
{ if ( state . backtracking == 0 ) { before ( grammarAccess . getXExpressionOrVarDeclarationRule ( ) ) ; } pushFollow ( FOLLOW_1 ) ; ruleXExpressionOrVarDeclaration ( ) ; state . _fsp -- ; if ( state . failed ) return ; if ( state . backtracking == 0 ) { after ( grammarAccess . getXExpressionOrVarDeclarationRule ( ) ) ; } match ( input , EOF , FOLLOW_2 ) ; if ( state . failed ) return ; } } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; } finally { } return ;
|
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EEnum getIfcObjectTypeEnum ( ) { } }
|
if ( ifcObjectTypeEnumEEnum == null ) { ifcObjectTypeEnumEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 1021 ) ; } return ifcObjectTypeEnumEEnum ;
|
public class CmsImportVersion10 { /** * Sets the userLastModified . < p >
* @ param userLastModified the userLastModified to set
* @ see # N _ USERLASTMODIFIED
* @ see # addResourceAttributesRules ( Digester , String ) */
public void setUserLastModified ( String userLastModified ) { } }
|
if ( null == userLastModified ) { // The optional user last modified information is not provided
m_resourceBuilder . setUserLastModified ( getRequestContext ( ) . getCurrentUser ( ) . getId ( ) ) ; } else { // use the user last modified information from the manifest
try { String userLastModifiedName = OpenCms . getImportExportManager ( ) . translateUser ( userLastModified ) ; try { m_resourceBuilder . setUserLastModified ( getCms ( ) . readUser ( userLastModifiedName ) . getId ( ) ) ; } catch ( @ SuppressWarnings ( "unused" ) CmsDbEntryNotFoundException e ) { m_resourceBuilder . setUserLastModified ( getRequestContext ( ) . getCurrentUser ( ) . getId ( ) ) ; } } catch ( Throwable e ) { setThrowable ( e ) ; } }
|
public class MessageQueueFilter { /** * Takes the filtered message from message queue . This method will be blocked
* until the message becomes available or timeout is detected .
* @ param timeoutMs the timeout for blocking , in millisecond .
* @ return filtered message tuple .
* @ throws InterruptedException in case of interrupt on blocking .
* @ throws TimeoutException if timeout happens during blocking . */
protected MessageTuple getMessage ( int timeoutMs ) throws InterruptedException , TimeoutException { } }
|
MessageTuple tuple = messageQueue . poll ( timeoutMs , TimeUnit . MILLISECONDS ) ; if ( tuple == null ) { throw new TimeoutException ( "Timeout while waiting for the message." ) ; } if ( tuple . getMessage ( ) . getType ( ) == MessageType . SHUT_DOWN ) { // If it ' s SHUT _ DOWN message .
throw new LeftCluster ( "Left cluster" ) ; } return tuple ;
|
public class ConsumerService { /** * Dispatch message to callback . Override to address special threading considerations .
* @ param channel The channel that delivered the message .
* @ param message The message to dispatch .
* @ param callbacks The callbacks to receive the message . */
protected void dispatchMessages ( String channel , Message message , Set < IMessageCallback > callbacks ) { } }
|
for ( IMessageCallback callback : callbacks ) { try { callback . onMessage ( channel , message ) ; } catch ( Exception e ) { } }
|
public class Problem { /** * Filters and returns first problem input which name
* match the given < tt > type < / tt >
* @ param type Type of the input to retrieve ( usually small or large ) .
* @ return Corresponding input if any , < tt > null < / tt > otherwise . */
public ProblemInput getProblemInput ( final String type ) { } }
|
final String name = type . toLowerCase ( ) ; for ( final ProblemInput input : getProblemInputs ( ) ) { if ( input . getName ( ) . equals ( name ) ) { return input ; } } return null ;
|
public class VasEventHandler { /** * Updates the ' reservable ' property of the charging station . If the charging station cannot be found in the
* repository an error is logged .
* @ param chargingStationId charging station identifier .
* @ param reservable true if the charging station is reservable , false otherwise . */
private void updateReservableForChargingStation ( ChargingStationId chargingStationId , boolean reservable ) { } }
|
ChargingStation chargingStation = getChargingStation ( chargingStationId ) ; if ( chargingStation != null ) { chargingStation . setReservable ( reservable ) ; chargingStationRepository . createOrUpdate ( chargingStation ) ; }
|
public class CommercePriceListPersistenceImpl { /** * Returns an ordered range of all the commerce price lists where commerceCurrencyId = & # 63 ; .
* Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CommercePriceListModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order .
* @ param commerceCurrencyId the commerce currency ID
* @ param start the lower bound of the range of commerce price lists
* @ param end the upper bound of the range of commerce price lists ( not inclusive )
* @ param orderByComparator the comparator to order the results by ( optionally < code > null < / code > )
* @ return the ordered range of matching commerce price lists */
@ Override public List < CommercePriceList > findByCommerceCurrencyId ( long commerceCurrencyId , int start , int end , OrderByComparator < CommercePriceList > orderByComparator ) { } }
|
return findByCommerceCurrencyId ( commerceCurrencyId , start , end , orderByComparator , true ) ;
|
public class SparseBaseLevel1 { /** * Computes the Euclidean norm of a vector .
* @ param arr a vector
* @ return the Euclidean norm of the vector */
@ Override public double nrm2 ( INDArray arr ) { } }
|
switch ( arr . data ( ) . dataType ( ) ) { case DOUBLE : DefaultOpExecutioner . validateDataType ( DataType . DOUBLE , arr ) ; return dnrm2 ( arr . length ( ) , arr , 1 ) ; case FLOAT : DefaultOpExecutioner . validateDataType ( DataType . FLOAT , arr ) ; return snrm2 ( arr . length ( ) , arr , 1 ) ; case HALF : return hnrm2 ( arr . length ( ) , arr , 1 ) ; default : } throw new UnsupportedOperationException ( ) ;
|
public class RelaxDefaultsParserConfiguration { /** * Insert the Relax NG defaults component */
protected void insertRelaxDefaultsComponent ( ) { } }
|
if ( fRelaxDefaults == null ) { fRelaxDefaults = new RelaxNGDefaultsComponent ( resolver ) ; addCommonComponent ( fRelaxDefaults ) ; fRelaxDefaults . reset ( this ) ; } XMLDocumentSource prev = fLastComponent ; fLastComponent = fRelaxDefaults ; XMLDocumentHandler next = prev . getDocumentHandler ( ) ; prev . setDocumentHandler ( fRelaxDefaults ) ; fRelaxDefaults . setDocumentSource ( prev ) ; if ( next != null ) { fRelaxDefaults . setDocumentHandler ( next ) ; next . setDocumentSource ( fRelaxDefaults ) ; }
|
public class MetaqTemplate { /** * Returns or create a message producer for topic .
* @ param topic
* @ return
* @ since 1.4.5 */
public MessageProducer getOrCreateProducer ( final String topic ) { } }
|
if ( ! this . shareProducer ) { FutureTask < MessageProducer > task = this . producers . get ( topic ) ; if ( task == null ) { task = new FutureTask < MessageProducer > ( new Callable < MessageProducer > ( ) { @ Override public MessageProducer call ( ) throws Exception { MessageProducer producer = MetaqTemplate . this . messageSessionFactory . createProducer ( ) ; producer . publish ( topic ) ; if ( ! StringUtils . isBlank ( MetaqTemplate . this . defaultTopic ) ) { producer . setDefaultTopic ( MetaqTemplate . this . defaultTopic ) ; } return producer ; } } ) ; FutureTask < MessageProducer > oldTask = this . producers . putIfAbsent ( topic , task ) ; if ( oldTask != null ) { task = oldTask ; } else { task . run ( ) ; } } try { MessageProducer producer = task . get ( ) ; return producer ; } catch ( ExecutionException e ) { throw ThreadUtils . launderThrowable ( e . getCause ( ) ) ; } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; } } else { if ( this . sharedProducer == null ) { synchronized ( this ) { if ( this . sharedProducer == null ) { this . sharedProducer = this . messageSessionFactory . createProducer ( ) ; if ( ! StringUtils . isBlank ( this . defaultTopic ) ) { this . sharedProducer . setDefaultTopic ( this . defaultTopic ) ; } } } } this . sharedProducer . publish ( topic ) ; return this . sharedProducer ; } throw new IllegalStateException ( "Could not create producer for topic '" + topic + "'" ) ;
|
public class BaseTangramEngine { /** * Set original data list with type { @ link T } in Tangram .
* @ param data Original data with type { @ link T } . */
public void setData ( @ Nullable T data ) { } }
|
Preconditions . checkState ( mGroupBasicAdapter != null , "Must call bindView() first" ) ; List < Card > cards = mDataParser . parseGroup ( data , this ) ; this . setData ( cards ) ;
|
public class ResponseAttachmentInputStreamSupport { /** * Registers a set of streams that were associated with a particular request . Does nothing if { @ link # shutdown ( ) }
* has been invoked , in which case any use of the { @ link # getReadHandler ( ) read handler } will result in behavior
* equivalent to what would be seen if the the registered stream had 0 bytes of content .
* @ param operationId id of the request
* @ param streams the streams . Cannot be { @ code null } but may be empty */
synchronized void registerStreams ( int operationId , List < OperationResponse . StreamEntry > streams ) { } }
|
// ^ ^ ^ synchronize on ' this ' to avoid races with shutdown
if ( ! stopped ) { // Streams share a timestamp so activity on any is sufficient to keep the rest alive
AtomicLong timestamp = new AtomicLong ( System . currentTimeMillis ( ) ) ; for ( int i = 0 ; i < streams . size ( ) ; i ++ ) { OperationResponse . StreamEntry stream = streams . get ( i ) ; InputStreamKey key = new InputStreamKey ( operationId , i ) ; streamMap . put ( key , new TimedStreamEntry ( stream , timestamp ) ) ; } } else { // Just close the streams , as no caller ever will
for ( int i = 0 ; i < streams . size ( ) ; i ++ ) { closeStreamEntry ( streams . get ( i ) , operationId , i ) ; } }
|
public class SelenideTargetLocator { /** * Switch to window / tab by name / handle / title
* @ param nameOrHandleOrTitle name or handle or title of window / tab */
@ Override public WebDriver window ( String nameOrHandleOrTitle ) { } }
|
try { return Wait ( ) . until ( windowToBeAvailableAndSwitchToIt ( nameOrHandleOrTitle ) ) ; } catch ( TimeoutException e ) { throw new NoSuchWindowException ( "No window found with name or handle or title: " + nameOrHandleOrTitle , e ) ; }
|
public class UnionPathIterator { /** * Get the analysis bits for this walker , as defined in the WalkerFactory .
* @ return One of WalkerFactory # BIT _ DESCENDANT , etc . */
public int getAnalysisBits ( ) { } }
|
int bits = 0 ; if ( m_exprs != null ) { int n = m_exprs . length ; for ( int i = 0 ; i < n ; i ++ ) { int bit = m_exprs [ i ] . getAnalysisBits ( ) ; bits |= bit ; } } return bits ;
|
public class ExampleSegmentSuperpixels { /** * Segments and visualizes the image */
public static < T extends ImageBase < T > > void performSegmentation ( ImageSuperpixels < T > alg , T color ) { } }
|
// Segmentation often works better after blurring the image . Reduces high frequency image components which
// can cause over segmentation
GBlurImageOps . gaussian ( color , color , 0.5 , - 1 , null ) ; // Storage for segmented image . Each pixel will be assigned a label from 0 to N - 1 , where N is the number
// of segments in the image
GrayS32 pixelToSegment = new GrayS32 ( color . width , color . height ) ; // Segmentation magic happens here
alg . segment ( color , pixelToSegment ) ; // Displays the results
visualize ( pixelToSegment , color , alg . getTotalSuperpixels ( ) ) ;
|
public class ParsedColInfo { /** * Construct a ParsedColInfo from Volt XML .
* Allow caller to specify actions to finalize the parsed expression . */
static public ParsedColInfo fromOrderByXml ( AbstractParsedStmt parsedStmt , VoltXMLElement orderByXml , ExpressionAdjuster adjuster ) { } }
|
// make sure everything is kosher
assert ( orderByXml . name . equalsIgnoreCase ( "orderby" ) ) ; // get desc / asc
String desc = orderByXml . attributes . get ( "desc" ) ; boolean descending = ( desc != null ) && ( desc . equalsIgnoreCase ( "true" ) ) ; // get the columnref or other expression inside the orderby node
VoltXMLElement child = orderByXml . children . get ( 0 ) ; assert ( child != null ) ; // create the orderby column
ParsedColInfo orderCol = new ParsedColInfo ( ) ; orderCol . m_orderBy = true ; orderCol . m_ascending = ! descending ; AbstractExpression orderExpr = parsedStmt . parseExpressionTree ( child ) ; assert ( orderExpr != null ) ; orderCol . m_expression = adjuster . adjust ( orderExpr ) ; // Cases :
// child could be columnref , in which case it ' s either a normal column
// or an expression .
// The latter could be a case if this column came from a subquery that
// was optimized out .
// Just make a ParsedColInfo object for it and the planner will do the
// right thing later .
if ( orderExpr instanceof TupleValueExpression ) { TupleValueExpression tve = ( TupleValueExpression ) orderExpr ; orderCol . m_columnName = tve . getColumnName ( ) ; orderCol . m_tableName = tve . getTableName ( ) ; orderCol . m_tableAlias = tve . getTableAlias ( ) ; if ( orderCol . m_tableAlias == null ) { orderCol . m_tableAlias = orderCol . m_tableName ; } orderCol . m_alias = tve . getColumnAlias ( ) ; } else { String alias = child . attributes . get ( "alias" ) ; orderCol . m_alias = alias ; orderCol . m_tableName = AbstractParsedStmt . TEMP_TABLE_NAME ; orderCol . m_tableAlias = AbstractParsedStmt . TEMP_TABLE_NAME ; orderCol . m_columnName = "" ; // Replace its expression to TVE after we build the ExpressionIndexMap
if ( ( child . name . equals ( "operation" ) == false ) && ( child . name . equals ( "aggregation" ) == false ) && ( child . name . equals ( "win_aggregation" ) == false ) && ( child . name . equals ( "function" ) == false ) && ( child . name . equals ( "rank" ) == false ) && ( child . name . equals ( "value" ) == false ) && ( child . name . equals ( "columnref" ) == false ) ) { throw new RuntimeException ( "ORDER BY parsed with strange child node type: " + child . name ) ; } } return orderCol ;
|
public class AttributeDefinition { /** * Creates a returns a basic model node describing the attribute , after attaching it to the given overall resource
* description model node . The node describing the attribute is returned to make it easy to perform further
* modification .
* @ param resourceDescription the overall resource description
* @ param resolver provider of localized text descriptions
* @ param locale locale to pass to the resolver
* @ param bundle bundle to pass to the resolver
* @ return the attribute description node */
public ModelNode addResourceAttributeDescription ( final ModelNode resourceDescription , final ResourceDescriptionResolver resolver , final Locale locale , final ResourceBundle bundle ) { } }
|
final ModelNode attr = getNoTextDescription ( false ) ; final String description = resolver . getResourceAttributeDescription ( getName ( ) , locale , bundle ) ; attr . get ( ModelDescriptionConstants . DESCRIPTION ) . set ( description ) ; final ModelNode result = resourceDescription . get ( ModelDescriptionConstants . ATTRIBUTES , getName ( ) ) . set ( attr ) ; ModelNode deprecated = addDeprecatedInfo ( result ) ; if ( deprecated != null ) { deprecated . get ( ModelDescriptionConstants . REASON ) . set ( resolver . getResourceAttributeDeprecatedDescription ( getName ( ) , locale , bundle ) ) ; } addAccessConstraints ( result , locale ) ; return result ;
|
public class BoxApiBookmark { /** * Gets a request that moves a bookmark to another folder
* @ param id id of bookmark to move
* @ param parentId id of parent folder to move bookmark into
* @ return request to move a bookmark */
public BoxRequestsBookmark . UpdateBookmark getMoveRequest ( String id , String parentId ) { } }
|
BoxRequestsBookmark . UpdateBookmark request = new BoxRequestsBookmark . UpdateBookmark ( id , getBookmarkInfoUrl ( id ) , mSession ) ; request . setParentId ( parentId ) ; return request ;
|
public class SqlUtility { /** * Extract from value string every placeholder : { } , replace it with ? and
* then convert every field typeName with column typeName . The result is a
* pair : the first value is the elaborated string . The second is the list of
* parameters associated to ? . This second parameter is the list of
* parameters and replaced with ? .
* @ param value the value
* @ param method the method
* @ param entity the entity
* @ return Pair */
public static Pair < String , List < Pair < String , TypeName > > > extractParametersFromString ( String value , SQLiteModelMethod method , SQLiteEntity entity ) { } }
|
String whereStatement = value ; Pair < String , List < Pair < String , TypeName > > > result = new Pair < String , List < Pair < String , TypeName > > > ( ) ; result . value1 = new ArrayList < Pair < String , TypeName > > ( ) ; // replace placeholder : { } with ?
{ Matcher matcher = PARAMETER . matcher ( whereStatement ) ; String paramName ; StringBuffer buffer = new StringBuffer ( ) ; TypeName paramType ; while ( matcher . find ( ) ) { matcher . appendReplacement ( buffer , "?" ) ; paramName = SqlAnalyzer . extractParamName ( matcher ) ; paramType = method . findParameterTypeByAliasOrName ( paramName ) ; if ( paramType == null ) { throw ( new MethodParameterNotFoundException ( method , paramName ) ) ; } result . value1 . add ( new Pair < String , TypeName > ( paramName , paramType ) ) ; } matcher . appendTail ( buffer ) ; whereStatement = buffer . toString ( ) ; } // replace fields
{ Matcher matcher = WORD . matcher ( whereStatement ) ; StringBuffer buffer = new StringBuffer ( ) ; while ( matcher . find ( ) ) { ModelProperty property = entity . findPropertyByName ( matcher . group ( 1 ) ) ; if ( property != null ) { matcher . appendReplacement ( buffer , entity . findPropertyByName ( matcher . group ( 1 ) ) . columnName ) ; } } matcher . appendTail ( buffer ) ; whereStatement = buffer . toString ( ) ; } result . value0 = whereStatement ; return result ;
|
public class FileOutputCommitter { /** * Delete the temporary directory , including all of the work directories .
* This is called for all jobs whose final run state is SUCCEEDED
* @ param context the job ' s context . */
public void commitJob ( JobContext context ) throws IOException { } }
|
// delete the _ temporary folder
cleanupJob ( context ) ; // check if the o / p dir should be marked
if ( shouldMarkOutputDir ( context . getConfiguration ( ) ) ) { // create a _ success file in the o / p folder
markOutputDirSuccessful ( context ) ; }
|
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EClass getIfcGeometricSetSelect ( ) { } }
|
if ( ifcGeometricSetSelectEClass == null ) { ifcGeometricSetSelectEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 1134 ) ; } return ifcGeometricSetSelectEClass ;
|
public class Transition { /** * Pauses this transition , sending out calls to { @ link
* TransitionListener # onTransitionPause ( Transition ) } to all listeners
* and pausing all running animators started by this transition .
* @ hide */
public void pause ( @ NonNull View sceneRoot ) { } }
|
if ( ! mEnded ) { synchronized ( sRunningAnimators ) { ArrayMap < Animator , AnimationInfo > runningAnimators = getRunningAnimators ( ) ; int numOldAnims = runningAnimators . size ( ) ; Object windowId = ViewUtils . getWindowId ( sceneRoot ) ; for ( int i = numOldAnims - 1 ; i >= 0 ; i -- ) { AnimationInfo info = runningAnimators . valueAt ( i ) ; if ( info . view != null && windowId != null && windowId . equals ( info . windowId ) ) { Animator anim = runningAnimators . keyAt ( i ) ; AnimatorUtils . pause ( anim ) ; } } } if ( mListeners != null && mListeners . size ( ) > 0 ) { ArrayList < TransitionListener > tmpListeners = ( ArrayList < TransitionListener > ) mListeners . clone ( ) ; int numListeners = tmpListeners . size ( ) ; for ( int i = 0 ; i < numListeners ; ++ i ) { tmpListeners . get ( i ) . onTransitionPause ( this ) ; } } mPaused = true ; }
|
public class UniversalKafkaQueue { /** * { @ inheritDoc }
* @ since 0.7.0 */
@ Override public UniversalKafkaQueue init ( ) throws Exception { } }
|
super . init ( ) ; if ( getMessageFactory ( ) == null ) { setMessageFactory ( UniversalIdIntQueueMessageFactory . INSTANCE ) ; } return this ;
|
public class HmlUtils { /** * Create and return zero or more DNA HML Sequences read from the specified reader in FASTA format .
* @ param reader reader to read from , must not be null
* @ return zero or more DNA HML Sequences read from the specified reader in FASTA format
* @ throws IOException if an I / O error occurs */
public static Iterable < Sequence > createSequences ( final BufferedReader reader ) throws IOException { } }
|
checkNotNull ( reader ) ; List < Sequence > sequences = new ArrayList < Sequence > ( ) ; for ( SequenceIterator it = SeqIOTools . readFastaDNA ( reader ) ; it . hasNext ( ) ; ) { try { sequences . add ( createSequence ( it . nextSequence ( ) ) ) ; } catch ( BioException e ) { throw new IOException ( "could not read DNA sequences" , e ) ; } } return sequences ;
|
public class HtmlGraphicImage { /** * < p > Return the value of the < code > height < / code > property . < / p >
* < p > Contents : Override for the height of this image . */
public java . lang . String getHeight ( ) { } }
|
return ( java . lang . String ) getStateHelper ( ) . eval ( PropertyKeys . height ) ;
|
public class MultifactorAuthenticationTrustRecordCouchDbRepository { /** * Find by principal on or after date .
* @ param principal Principal to search for
* @ param onOrAfterDate start date for search
* @ return records for principal after date . */
@ View ( name = "by_principal_date" , map = "function(doc) { if (doc.recordKey && doc.principal && doc.deviceFingerprint && doc.recordDate) { emit([doc.principal, doc.recordDate], doc) } }" ) public List < CouchDbMultifactorAuthenticationTrustRecord > findByPrincipalAfterDate ( final String principal , final LocalDateTime onOrAfterDate ) { } }
|
val view = createQuery ( "by_principal_date" ) . startKey ( ComplexKey . of ( principal , onOrAfterDate ) ) . endKey ( ComplexKey . of ( principal , "999999" ) ) ; return db . queryView ( view , CouchDbMultifactorAuthenticationTrustRecord . class ) ;
|
public class DistributedQueue { /** * Start the queue . No other methods work until this is called
* @ throws Exception startup errors */
@ Override public void start ( ) throws Exception { } }
|
if ( ! state . compareAndSet ( State . LATENT , State . STARTED ) ) { throw new IllegalStateException ( ) ; } try { client . create ( ) . creatingParentContainersIfNeeded ( ) . forPath ( queuePath ) ; } catch ( KeeperException . NodeExistsException ignore ) { // this is OK
} if ( lockPath != null ) { try { client . create ( ) . creatingParentContainersIfNeeded ( ) . forPath ( lockPath ) ; } catch ( KeeperException . NodeExistsException ignore ) { // this is OK
} } if ( ! isProducerOnly || ( maxItems != QueueBuilder . NOT_SET ) ) { childrenCache . start ( ) ; } if ( ! isProducerOnly ) { service . submit ( new Callable < Object > ( ) { @ Override public Object call ( ) { runLoop ( ) ; return null ; } } ) ; }
|
public class Context { /** * Change the current error reporter .
* @ return the previous error reporter
* @ see org . mozilla . javascript . ErrorReporter */
public final ErrorReporter setErrorReporter ( ErrorReporter reporter ) { } }
|
if ( sealed ) onSealedMutation ( ) ; if ( reporter == null ) throw new IllegalArgumentException ( ) ; ErrorReporter old = getErrorReporter ( ) ; if ( reporter == old ) { return old ; } Object listeners = propertyListeners ; if ( listeners != null ) { firePropertyChangeImpl ( listeners , errorReporterProperty , old , reporter ) ; } this . errorReporter = reporter ; return old ;
|
public class PluginFile { /** * Executes an action .
* @ param action the action to execute
* @ throws PluginException */
void executeAction ( Action action ) throws PluginException { } }
|
try { switch ( action . actionType ) { case DELETE : this . logger . fine ( "Deleting " + action . parameter + "..." ) ; File f = new File ( action . parameter ) ; Utils . deleteFilesRecursively ( f ) ; break ; case DOWNLOAD : this . logger . fine ( "Downloading " + action . parameter + "..." ) ; URI uri = UriUtils . urlToUri ( action . parameter ) ; File targetFile = new File ( System . getProperty ( "java.io.tmpdir" ) , TMP_FILE ) ; InputStream in = null ; try { in = uri . toURL ( ) . openStream ( ) ; Utils . copyStream ( in , targetFile ) ; } finally { Utils . closeQuietly ( in ) ; } break ; case MOVE : List < String > parts = Utils . splitNicely ( action . parameter , "->" ) ; if ( parts . size ( ) != 2 ) { this . logger . warning ( "Invalid syntax for 'move' action. " + action . parameter ) ; } else { File source = new File ( parts . get ( 0 ) ) ; File target = new File ( parts . get ( 1 ) ) ; this . logger . fine ( "Moving " + source + " to " + target + "..." ) ; if ( ! source . renameTo ( target ) ) throw new IOException ( source + " could not be moved to " + target ) ; } break ; case COPY : parts = Utils . splitNicely ( action . parameter , "->" ) ; if ( parts . size ( ) != 2 ) { this . logger . warning ( "Invalid syntax for 'copy' action. " + action . parameter ) ; } else { File source = new File ( parts . get ( 0 ) ) ; File target = new File ( parts . get ( 1 ) ) ; this . logger . fine ( "Copying " + source + " to " + target + "..." ) ; Utils . copyStream ( source , target ) ; } break ; default : this . logger . fine ( "Ignoring the action..." ) ; break ; } } catch ( Exception e ) { throw new PluginException ( e ) ; }
|
public class PathPatternUtils { /** * Normalizes path . Returns string without character < / code > / < / code > at the end . */
private static String normalizePath ( String absPath ) { } }
|
if ( absPath . endsWith ( "/" ) ) { return absPath . substring ( 0 , absPath . length ( ) - 1 ) ; } return absPath ;
|
public class Parser { /** * When this is called , the keyword " foreach " has already been read . */
private ForeachStatement parseForeachStatement ( Token token ) throws IOException { } }
|
SourceInfo info = token . getSourceInfo ( ) ; token = peek ( ) ; if ( token . getID ( ) == Token . LPAREN ) { read ( ) ; } else { error ( "foreach.lparen.expected" , token ) ; } VariableRef loopVar = parseLValue ( ) ; // mod for declarative typing
boolean foundASToken = false ; Token asToken = peek ( ) ; if ( asToken . getID ( ) == Token . AS ) { foundASToken = true ; read ( ) ; TypeName typeName = parseTypeName ( ) ; SourceInfo info2 = peek ( ) . getSourceInfo ( ) ; loopVar . setVariable ( new Variable ( info2 , loopVar . getName ( ) , typeName , true ) ) ; } // end mod
token = peek ( ) ; if ( token . getID ( ) == Token . IN ) { read ( ) ; } else { error ( "foreach.in.expected" , token ) ; } Expression range = parseExpression ( ) ; Expression endRange = null ; token = peek ( ) ; if ( token . getID ( ) == Token . DOTDOT ) { read ( ) ; endRange = parseExpression ( ) ; token = peek ( ) ; } if ( endRange != null && foundASToken ) error ( "foreach.as.not.allowed" , asToken ) ; boolean reverse = false ; if ( token . getID ( ) == Token . REVERSE ) { read ( ) ; reverse = true ; token = peek ( ) ; } if ( token . getID ( ) == Token . RPAREN ) { read ( ) ; } else { error ( "foreach.rparen.expected" , token ) ; } Block body = parseBlock ( ) ; info = info . setEndPosition ( body . getSourceInfo ( ) ) ; return new ForeachStatement ( info , loopVar , range , endRange , reverse , body ) ;
|
public class LogRepositoryListener { /** * ( non - Javadoc )
* @ see org . eclipse . aether . util . listener . AbstractRepositoryListener #
* metadataInstalling ( org . eclipse . aether . RepositoryEvent ) */
@ Override public void metadataInstalling ( RepositoryEvent event ) { } }
|
log . finer ( "Installing " + event . getMetadata ( ) + " to " + event . getFile ( ) ) ;
|
public class JMXOverRMIServerSocketFactory { /** * ( non - Javadoc )
* @ see java . rmi . server . RMIServerSocketFactory # createServerSocket ( int ) */
@ Override public ServerSocket createServerSocket ( int port ) throws IOException { } }
|
ServerSocket socket = getSocketFactory ( ) . createServerSocket ( port , backLog , getListenAddress ( ) ) ; if ( manageSockets ) createdSockets . add ( socket ) ; return socket ;
|
public class JDBC4CallableStatement { /** * Retrieves the value of the designated JDBC TIMESTAMP parameter as a java . sql . Timestamp object , using the given Calendar object to construct the Timestamp object . */
@ Override public Timestamp getTimestamp ( int parameterIndex , Calendar cal ) throws SQLException { } }
|
checkClosed ( ) ; throw SQLError . noSupport ( ) ;
|
public class GoogleHadoopFileSystemBase { /** * Appends to an existing file ( optional operation ) . Not supported .
* @ param hadoopPath The existing file to be appended .
* @ param bufferSize The size of the buffer to be used .
* @ param progress For reporting progress if it is not null .
* @ return A writable stream .
* @ throws IOException if an error occurs . */
@ Override public FSDataOutputStream append ( Path hadoopPath , int bufferSize , Progressable progress ) throws IOException { } }
|
long startTime = System . nanoTime ( ) ; Preconditions . checkArgument ( hadoopPath != null , "hadoopPath must not be null" ) ; logger . atFine ( ) . log ( "GHFS.append: %s, bufferSize: %d (ignored)" , hadoopPath , bufferSize ) ; long duration = System . nanoTime ( ) - startTime ; increment ( Counter . APPEND ) ; increment ( Counter . APPEND_TIME , duration ) ; throw new IOException ( "The append operation is not supported." ) ;
|
public class ProhibitedCompoundRuleEvaluator { /** * TODO deduplicate / delegate */
private List < Map . Entry < Sentence , Map . Entry < Integer , Integer > > > getRelevantSentences ( List < String > inputs , String token , int maxSentences ) throws IOException { } }
|
List < Map . Entry < Sentence , Map . Entry < Integer , Integer > > > sentences = new ArrayList < > ( ) ; for ( String input : inputs ) { if ( new File ( input ) . isDirectory ( ) ) { File file = new File ( input , token + ".txt" ) ; if ( ! file . exists ( ) ) { throw new RuntimeException ( "File with example sentences not found: " + file ) ; } try ( FileInputStream fis = new FileInputStream ( file ) ) { SentenceSource sentenceSource = new PlainTextSentenceSource ( fis , language ) ; sentences = getSentencesFromSource ( inputs , token , maxSentences , sentenceSource ) ; } } else { SentenceSource sentenceSource = MixingSentenceSource . create ( inputs , language ) ; sentences = getSentencesFromSource ( inputs , token , maxSentences , sentenceSource ) ; } } return sentences ;
|
public class MarketplacePortletDefinition { /** * private method that sets the parentCategories field and the categories field This will ensure
* that the public methods { @ link # getParentCategories ( ) getParentCategories ( ) } and { @ link
* # getCategories ( ) getCategories ( ) } will not return null . Empty sets are allowed */
private void initCategories ( ) { } }
|
Set < PortletCategory > allCategories = new HashSet < PortletCategory > ( ) ; this . setParentCategories ( this . portletCategoryRegistry . getParentCategories ( this ) ) ; for ( PortletCategory childCategory : this . parentCategories ) { allCategories . add ( childCategory ) ; allCategories . addAll ( this . portletCategoryRegistry . getAllParentCategories ( childCategory ) ) ; } this . setCategories ( allCategories ) ;
|
public class CpuNDArrayFactory { /** * This method averages input arrays , and returns averaged array
* @ param target
* @ param arrays
* @ return */
@ Override public INDArray average ( INDArray target , INDArray [ ] arrays ) { } }
|
if ( arrays == null || arrays . length == 0 ) throw new RuntimeException ( "Input arrays are missing" ) ; if ( arrays . length == 1 ) { // Edge case - average 1 array - no op
if ( target == null ) { return null ; } return target . assign ( arrays [ 0 ] ) ; } long len = target != null ? target . lengthLong ( ) : arrays [ 0 ] . length ( ) ; PointerPointer dataPointers = new PointerPointer ( arrays . length ) ; val firstType = arrays [ 0 ] . dataType ( ) ; for ( int i = 0 ; i < arrays . length ; i ++ ) { Nd4j . getCompressor ( ) . autoDecompress ( arrays [ i ] ) ; Preconditions . checkArgument ( arrays [ i ] . dataType ( ) == firstType , "All arrays must have the same data type" ) ; if ( arrays [ i ] . elementWiseStride ( ) != 1 ) throw new ND4JIllegalStateException ( "Native averaging is applicable only to continuous INDArrays" ) ; if ( arrays [ i ] . lengthLong ( ) != len ) throw new ND4JIllegalStateException ( "All arrays should have equal length for averaging" ) ; dataPointers . put ( i , arrays [ i ] . data ( ) . addressPointer ( ) ) ; } nativeOps . average ( null , dataPointers , ( LongPointer ) arrays [ 0 ] . shapeInfoDataBuffer ( ) . addressPointer ( ) , null , null , target == null ? null : target . data ( ) . addressPointer ( ) , target == null ? null : ( LongPointer ) target . shapeInfoDataBuffer ( ) . addressPointer ( ) , null , null , arrays . length , len , true ) ; return target ;
|
public class MapMatching { /** * Filters GPX entries to only those which will be used for map matching ( i . e . those which
* are separated by at least 2 * measurementErrorSigman */
private List < Observation > filterGPXEntries ( List < Observation > gpxList ) { } }
|
List < Observation > filtered = new ArrayList < > ( ) ; Observation prevEntry = null ; int last = gpxList . size ( ) - 1 ; for ( int i = 0 ; i <= last ; i ++ ) { Observation gpxEntry = gpxList . get ( i ) ; if ( i == 0 || i == last || distanceCalc . calcDist ( prevEntry . getPoint ( ) . getLat ( ) , prevEntry . getPoint ( ) . getLon ( ) , gpxEntry . getPoint ( ) . getLat ( ) , gpxEntry . getPoint ( ) . getLon ( ) ) > 2 * measurementErrorSigma ) { filtered . add ( gpxEntry ) ; prevEntry = gpxEntry ; } else { logger . debug ( "Filter out GPX entry: {}" , i + 1 ) ; } } return filtered ;
|
public class HazelcastProperties { /** * Returns the configured value of a { @ link HazelcastProperty } converted to milliseconds if
* it is positive , otherwise returns the passed default value .
* @ param property the { @ link HazelcastProperty } to get the value from
* @ param defaultValue the default value to return if property has non positive value .
* @ return the value in milliseconds if it is positive , otherwise the passed default value .
* @ throws IllegalArgumentException if the { @ link HazelcastProperty } has no { @ link TimeUnit } */
public long getPositiveMillisOrDefault ( HazelcastProperty property , long defaultValue ) { } }
|
long millis = getMillis ( property ) ; return millis > 0 ? millis : defaultValue ;
|
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link ArithType } { @ code > } } */
@ XmlElementDecl ( namespace = "http://www.w3.org/1998/Math/MathML" , name = "minus" ) public JAXBElement < ArithType > createMinus ( ArithType value ) { } }
|
return new JAXBElement < ArithType > ( _Minus_QNAME , ArithType . class , null , value ) ;
|
public class Symbol { /** * Search for rectangles which have the same width and x position , and
* which join together vertically and merge them together to reduce the
* number of rectangles needed to describe a symbol . */
protected void mergeVerticalBlocks ( ) { } }
|
for ( int i = 0 ; i < rectangles . size ( ) - 1 ; i ++ ) { for ( int j = i + 1 ; j < rectangles . size ( ) ; j ++ ) { Rectangle2D . Double firstRect = rectangles . get ( i ) ; Rectangle2D . Double secondRect = rectangles . get ( j ) ; if ( roughlyEqual ( firstRect . x , secondRect . x ) && roughlyEqual ( firstRect . width , secondRect . width ) ) { if ( roughlyEqual ( firstRect . y + firstRect . height , secondRect . y ) ) { firstRect . height += secondRect . height ; rectangles . set ( i , firstRect ) ; rectangles . remove ( j ) ; } } } }
|
public class WaiterExecution { /** * Calls the custom delay strategy to control the sleep time
* @ param pollingStrategyContext Provides the polling strategy context .
* Includes request and number of retries
* attempted so far . */
private void safeCustomDelay ( PollingStrategyContext pollingStrategyContext ) { } }
|
try { pollingStrategy . getDelayStrategy ( ) . delayBeforeNextRetry ( pollingStrategyContext ) ; } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; throw new RuntimeException ( e ) ; }
|
public class ConditionalCheck { /** * Ensures that a passed map as a parameter of the calling method is not empty .
* We recommend to use the overloaded method { @ link Check # notEmpty ( Collection , String ) } and pass as second argument
* the name of the parameter to enhance the exception message .
* @ param condition
* condition must be { @ code true } ^ so that the check will be performed
* @ param map
* a map which should not be empty
* @ throws IllegalNullArgumentException
* if the given argument { @ code map } is { @ code null }
* @ throws IllegalEmptyArgumentException
* if the given argument { @ code map } is empty */
@ ArgumentsChecked @ Throws ( { } }
|
IllegalNullArgumentException . class , IllegalEmptyArgumentException . class } ) public static < T extends Map < ? , ? > > void notEmpty ( final boolean condition , @ Nonnull final T map ) { if ( condition ) { Check . notEmpty ( map ) ; }
|
public class NumberUtilities { /** * Given an double string , it checks if it ' s a valid double ( based on apaches NumberUtils . createDouble )
* @ param doubleStr the double string to check
* @ return true if it ' s valid , otherwise false */
public static boolean isValidDouble ( @ Nullable final String doubleStr ) { } }
|
if ( StringUtils . isBlank ( doubleStr ) ) { return false ; } final String stripedDouble = StringUtils . strip ( doubleStr ) ; try { NumberUtils . createDouble ( stripedDouble ) ; return true ; } catch ( NumberFormatException e ) { return false ; }
|
public class MFPPush { /** * Set the default push notification options for notifications .
* @ param context - this is the Context of the application from getApplicationContext ( )
* @ param options - The MFPPushNotificationOptions with the default parameters */
private void setNotificationOptions ( Context context , MFPPushNotificationOptions options ) { } }
|
if ( this . appContext == null ) { this . appContext = context . getApplicationContext ( ) ; } this . options = options ; Gson gson = new Gson ( ) ; String json = gson . toJson ( options ) ; SharedPreferences sharedPreferences = appContext . getSharedPreferences ( PREFS_NAME , Context . MODE_PRIVATE ) ; MFPPushUtils . storeContentInSharedPreferences ( sharedPreferences , MFPPush . PREFS_MESSAGES_OPTIONS , json ) ;
|
public class CmsLogChannelTable { /** * Filters the table according to given search string . < p >
* @ param search string to be looked for . */
@ SuppressWarnings ( "unchecked" ) public void filterTable ( String search ) { } }
|
m_container . removeAllContainerFilters ( ) ; if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( search ) ) { m_container . addContainerFilter ( new Or ( new SimpleStringFilter ( TableColumn . Channel , search , true , false ) , new SimpleStringFilter ( TableColumn . ParentChannel , search , true , false ) , new SimpleStringFilter ( TableColumn . File , search , true , false ) ) ) ; } if ( ( getValue ( ) != null ) & ! ( ( Set < Logger > ) getValue ( ) ) . isEmpty ( ) ) { setCurrentPageFirstItemId ( ( ( Set < Logger > ) getValue ( ) ) . iterator ( ) . next ( ) ) ; }
|
public class WebApplicationHandler { protected synchronized void doStop ( ) throws Exception { } }
|
try { // Stop servlets
super . doStop ( ) ; // Stop filters
for ( int i = _filters . size ( ) ; i -- > 0 ; ) { FilterHolder holder = ( FilterHolder ) _filters . get ( i ) ; holder . stop ( ) ; } } finally { _webApplicationContext = null ; _sessionListeners = null ; _requestListeners = null ; _requestAttributeListeners = null ; _contextAttributeListeners = null ; }
|
public class TokenCachingStrategy { /** * Puts the expiration date into a Bundle .
* @ param bundle
* A Bundle in which the expiration date should be stored .
* @ param value
* The Date representing the expiration date .
* @ throws NullPointerException if the passed in Bundle or date value are null */
public static void putExpirationDate ( Bundle bundle , Date value ) { } }
|
Validate . notNull ( bundle , "bundle" ) ; Validate . notNull ( value , "value" ) ; putDate ( bundle , EXPIRATION_DATE_KEY , value ) ;
|
public class WatermarkDecrementer { /** * { @ inheritDoc } */
public boolean shouldDestroy ( ConnectionListener cl , long timeout , int currentSize , int minPoolSize , int destroyed ) { } }
|
if ( watermark < 0 ) return currentSize > minPoolSize ; return watermark < currentSize ;
|
public class ConfluenceGreenPepper { /** * < p > Getter for the field < code > spacePermissionManager < / code > . < / p >
* @ return a { @ link com . atlassian . confluence . security . SpacePermissionManager } object . */
public SpacePermissionManager getSpacePermissionManager ( ) { } }
|
if ( spacePermissionManager != null ) { return spacePermissionManager ; } spacePermissionManager = ( SpacePermissionManager ) ContainerManager . getComponent ( "spacePermissionManager" ) ; return spacePermissionManager ;
|
public class ClassUtils { /** * < p > Converts an array of { @ code Object } in to an array of { @ code Class } objects .
* If any of these objects is null , a null element will be inserted into the array . < / p >
* < p > This method returns { @ code null } for a { @ code null } input array . < / p >
* @ param array an { @ code Object } array
* @ return a { @ code Class } array , { @ code null } if null array input
* @ since 2.4 */
public static Class < ? > [ ] toClass ( final Object ... array ) { } }
|
if ( array == null ) { return null ; } else if ( array . length == 0 ) { return ArrayUtils . EMPTY_CLASS_ARRAY ; } final Class < ? > [ ] classes = new Class < ? > [ array . length ] ; for ( int i = 0 ; i < array . length ; i ++ ) { classes [ i ] = array [ i ] == null ? null : array [ i ] . getClass ( ) ; } return classes ;
|
public class VTimeZone { /** * Append the UNTIL attribute after RRULE line */
private static void appendUNTIL ( Writer writer , String until ) throws IOException { } }
|
if ( until != null ) { writer . write ( SEMICOLON ) ; writer . write ( ICAL_UNTIL ) ; writer . write ( EQUALS_SIGN ) ; writer . write ( until ) ; }
|
public class Config { /** * Extends the runtime classpath to include the files or directories specified .
* @ param paths one or more strings representing a single JAR file or a directory containing JARs .
* @ since 1.0.0 */
public void expandClasspath ( String ... paths ) { } }
|
if ( paths == null || paths . length == 0 ) { return ; } for ( String path : paths ) { expandClasspath ( new File ( PathUtil . resolve ( path ) ) ) ; }
|
public class GeometryMergeService { /** * End the merging process by effectively executing the merge operation and returning the result through a
* call - back .
* @ param callback The call - back function that will receive the merged geometry .
* @ throws GeometryMergeException Thrown in case the merging process has not been started or some other merging
* error . */
public void stop ( final GeometryFunction callback ) throws GeometryMergeException { } }
|
if ( ! busy ) { throw new GeometryMergeException ( "Can't stop the merging process since it is not activated." ) ; } if ( callback == null ) { cancel ( ) ; return ; } merge ( new GeometryFunction ( ) { public void execute ( Geometry geometry ) { callback . execute ( geometry ) ; try { clearGeometries ( ) ; } catch ( GeometryMergeException e ) { } busy = false ; eventBus . fireEvent ( new GeometryMergeStopEvent ( geometry ) ) ; } } ) ;
|
public class CommerceAccountUserRelPersistenceImpl { /** * Returns an ordered range of all the commerce account user rels where commerceAccountUserId = & # 63 ; .
* Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CommerceAccountUserRelModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order .
* @ param commerceAccountUserId the commerce account user ID
* @ param start the lower bound of the range of commerce account user rels
* @ param end the upper bound of the range of commerce account user rels ( not inclusive )
* @ param orderByComparator the comparator to order the results by ( optionally < code > null < / code > )
* @ param retrieveFromCache whether to retrieve from the finder cache
* @ return the ordered range of matching commerce account user rels */
@ Override public List < CommerceAccountUserRel > findByCommerceAccountUserId ( long commerceAccountUserId , int start , int end , OrderByComparator < CommerceAccountUserRel > orderByComparator , boolean retrieveFromCache ) { } }
|
boolean pagination = true ; FinderPath finderPath = null ; Object [ ] finderArgs = null ; if ( ( start == QueryUtil . ALL_POS ) && ( end == QueryUtil . ALL_POS ) && ( orderByComparator == null ) ) { pagination = false ; finderPath = FINDER_PATH_WITHOUT_PAGINATION_FIND_BY_COMMERCEACCOUNTUSERID ; finderArgs = new Object [ ] { commerceAccountUserId } ; } else { finderPath = FINDER_PATH_WITH_PAGINATION_FIND_BY_COMMERCEACCOUNTUSERID ; finderArgs = new Object [ ] { commerceAccountUserId , start , end , orderByComparator } ; } List < CommerceAccountUserRel > list = null ; if ( retrieveFromCache ) { list = ( List < CommerceAccountUserRel > ) finderCache . getResult ( finderPath , finderArgs , this ) ; if ( ( list != null ) && ! list . isEmpty ( ) ) { for ( CommerceAccountUserRel commerceAccountUserRel : list ) { if ( ( commerceAccountUserId != commerceAccountUserRel . getCommerceAccountUserId ( ) ) ) { list = null ; break ; } } } } if ( list == null ) { StringBundler query = null ; if ( orderByComparator != null ) { query = new StringBundler ( 3 + ( orderByComparator . getOrderByFields ( ) . length * 2 ) ) ; } else { query = new StringBundler ( 3 ) ; } query . append ( _SQL_SELECT_COMMERCEACCOUNTUSERREL_WHERE ) ; query . append ( _FINDER_COLUMN_COMMERCEACCOUNTUSERID_COMMERCEACCOUNTUSERID_2 ) ; if ( orderByComparator != null ) { appendOrderByComparator ( query , _ORDER_BY_ENTITY_ALIAS , orderByComparator ) ; } else if ( pagination ) { query . append ( CommerceAccountUserRelModelImpl . ORDER_BY_JPQL ) ; } String sql = query . toString ( ) ; Session session = null ; try { session = openSession ( ) ; Query q = session . createQuery ( sql ) ; QueryPos qPos = QueryPos . getInstance ( q ) ; qPos . add ( commerceAccountUserId ) ; if ( ! pagination ) { list = ( List < CommerceAccountUserRel > ) QueryUtil . list ( q , getDialect ( ) , start , end , false ) ; Collections . sort ( list ) ; list = Collections . 
unmodifiableList ( list ) ; } else { list = ( List < CommerceAccountUserRel > ) QueryUtil . list ( q , getDialect ( ) , start , end ) ; } cacheResult ( list ) ; finderCache . putResult ( finderPath , finderArgs , list ) ; } catch ( Exception e ) { finderCache . removeResult ( finderPath , finderArgs ) ; throw processException ( e ) ; } finally { closeSession ( session ) ; } } return list ;
|
public class CRLNumberExtension { /** * Write the extension to the DerOutputStream .
* @ param out the DerOutputStream to write the extension to .
* @ exception IOException on encoding errors . */
public void encode ( OutputStream out ) throws IOException { } }
|
DerOutputStream tmp = new DerOutputStream ( ) ; encode ( out , PKIXExtensions . CRLNumber_Id , true ) ;
|
public class AmazonConfigClient { /** * Returns the details of one or more retention configurations . If the retention configuration name is not
* specified , this action returns the details for all the retention configurations for that account .
* < note >
* Currently , AWS Config supports only one retention configuration per region in your account .
* < / note >
* @ param describeRetentionConfigurationsRequest
* @ return Result of the DescribeRetentionConfigurations operation returned by the service .
* @ throws InvalidParameterValueException
* One or more of the specified parameters are invalid . Verify that your parameters are valid and try again .
* @ throws NoSuchRetentionConfigurationException
* You have specified a retention configuration that does not exist .
* @ throws InvalidNextTokenException
* The specified next token is invalid . Specify the < code > nextToken < / code > string that was returned in the
* previous response to get the next page of results .
* @ sample AmazonConfig . DescribeRetentionConfigurations
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / config - 2014-11-12 / DescribeRetentionConfigurations "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public DescribeRetentionConfigurationsResult describeRetentionConfigurations ( DescribeRetentionConfigurationsRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeDescribeRetentionConfigurations ( request ) ;
|
public class CountersTable { /** * Count forward until the given node is found , or until
 * we have looked to the given amount .
 * @ param support The XPath context to use
 * @ param numberElem The given xsl : number element .
 * @ param node The node to count .
 * @ return The node count , or 0 if not found .
 * @ throws TransformerException */
public int countNode ( XPathContext support , ElemNumber numberElem , int node ) throws TransformerException { } }
|
// NOTE(review): order-dependent counting logic kept byte-identical; only comments added.
// First pass: if any existing Counter has already counted the target, reuse that count.
// Second pass: walk backwards from the target collecting uncounted nodes into m_newFound,
// then fold them into a (possibly new) Counter in forward document order via appendBtoFList.
int count = 0 ; Vector counters = getCounters ( numberElem ) ; int nCounters = counters . size ( ) ; // XPath countMatchPattern = numberElem . getCountMatchPattern ( support , node ) ;
// XPath fromMatchPattern = numberElem . m _ fromMatchPattern ;
int target = numberElem . getTargetNode ( support , node ) ; if ( DTM . NULL != target ) { for ( int i = 0 ; i < nCounters ; i ++ ) { Counter counter = ( Counter ) counters . elementAt ( i ) ; count = counter . getPreviouslyCounted ( support , target ) ; if ( count > 0 ) return count ; } // In the loop below , we collect the nodes in backwards doc order , so
// we don ' t have to do inserts , but then we store the nodes in forwards
// document order , so we don ' t have to insert nodes into that list ,
// so that ' s what the appendBtoFList stuff is all about . In cases
// of forward counting by one , this will mean a single node copy from
// the backwards list ( m _ newFound ) to the forwards list ( counter . m _ countNodes ) .
count = 0 ; if ( m_newFound == null ) m_newFound = new NodeSetDTM ( support . getDTMManager ( ) ) ; for ( ; DTM . NULL != target ; target = numberElem . getPreviousNode ( support , target ) ) { // First time in , we should not have to check for previous counts ,
// since the original target node was already checked in the
// block above .
if ( 0 != count ) { for ( int i = 0 ; i < nCounters ; i ++ ) { Counter counter = ( Counter ) counters . elementAt ( i ) ; int cacheLen = counter . m_countNodes . size ( ) ; if ( ( cacheLen > 0 ) && ( counter . m_countNodes . elementAt ( cacheLen - 1 ) == target ) ) { count += ( cacheLen + counter . m_countNodesStartCount ) ; if ( cacheLen > 0 ) appendBtoFList ( counter . m_countNodes , m_newFound ) ; m_newFound . removeAllElements ( ) ; return count ; } } } m_newFound . addElement ( target ) ; count ++ ; } // If we got to this point , then we didn ' t find a counter , so make
// one and add it to the list .
// NOTE(review): m_countersMade is incremented purely for diagnostics (see trailing comment below).
Counter counter = new Counter ( numberElem , new NodeSetDTM ( support . getDTMManager ( ) ) ) ; m_countersMade ++ ; // for diagnostics
appendBtoFList ( counter . m_countNodes , m_newFound ) ; m_newFound . removeAllElements ( ) ; counters . addElement ( counter ) ; } return count ;
|
public class KafkaMsgConsumer { /** * Gets a buffer to store consumed messages from a Kafka topic .
* @ param topic
* @ return
* @ since 1.2.0 */
private BlockingQueue < ConsumerRecord < String , byte [ ] > > _getBuffer ( String topic ) { } }
|
BlockingQueue < ConsumerRecord < String , byte [ ] > > buffer = topicBuffers . get ( topic ) ; if ( buffer == null ) { buffer = new LinkedBlockingQueue < ConsumerRecord < String , byte [ ] > > ( ) ; BlockingQueue < ConsumerRecord < String , byte [ ] > > existingBuffer = topicBuffers . putIfAbsent ( topic , buffer ) ; if ( existingBuffer != null ) { buffer = existingBuffer ; } } return buffer ;
|
public class AllFilterPanel { /** * This method initializes jScrollPane
* @ return javax . swing . JScrollPane */
private JScrollPane getJScrollPane ( ) { } }
|
if ( jScrollPane == null ) { jScrollPane = new JScrollPane ( ) ; jScrollPane . setViewportView ( getTableFilter ( ) ) ; jScrollPane . setBorder ( javax . swing . BorderFactory . createEtchedBorder ( javax . swing . border . EtchedBorder . RAISED ) ) ; jScrollPane . setEnabled ( false ) ; } return jScrollPane ;
|
public class SpriteSheet { /** * Get a sprite at a particular cell on the sprite sheet
* @ param x The x position of the cell on the sprite sheet
* @ param y The y position of the cell on the sprite sheet
* @ return The single image from the sprite sheet */
public Image getSprite ( int x , int y ) { } }
|
target . init ( ) ; initImpl ( ) ; if ( ( x < 0 ) || ( x >= subImages . length ) ) { throw new RuntimeException ( "SubImage out of sheet bounds: " + x + "," + y ) ; } if ( ( y < 0 ) || ( y >= subImages [ 0 ] . length ) ) { throw new RuntimeException ( "SubImage out of sheet bounds: " + x + "," + y ) ; } return target . getSubImage ( x * ( tw + spacing ) + margin , y * ( th + spacing ) + margin , tw , th ) ;
|
public class DefaultAgenda { /** * ( non - Javadoc )
* @ see org . kie . common . AgendaI # clearAgendaGroup ( java . lang . String ) */
public void clearAndCancelAgendaGroup ( final String name ) { } }
|
InternalAgendaGroup agendaGroup = this . agendaGroups . get ( name ) ; if ( agendaGroup != null ) { clearAndCancelAgendaGroup ( agendaGroup ) ; }
|
public class TemporalProcedures { /** * Format a temporal value to a String
* @ param input Any temporal type
* @ param format A valid DateTime format pattern ( ie yyyy - MM - dd ' T ' HH : mm : ss . SSSS )
* @ return */
@ UserFunction ( "apoc.temporal.format" ) @ Description ( "apoc.temporal.format(input, format) | Format a temporal value" ) public String format ( @ Name ( "temporal" ) Object input , @ Name ( value = "format" , defaultValue = "yyyy-MM-dd" ) String format ) { } }
|
try { DateTimeFormatter formatter = getOrCreate ( format ) ; if ( input instanceof LocalDate ) { return ( ( LocalDate ) input ) . format ( formatter ) ; } else if ( input instanceof ZonedDateTime ) { return ( ( ZonedDateTime ) input ) . format ( formatter ) ; } else if ( input instanceof LocalDateTime ) { return ( ( LocalDateTime ) input ) . format ( formatter ) ; } else if ( input instanceof LocalTime ) { return ( ( LocalTime ) input ) . format ( formatter ) ; } else if ( input instanceof OffsetTime ) { return ( ( OffsetTime ) input ) . format ( formatter ) ; } else if ( input instanceof DurationValue ) { return formatDuration ( input , format ) ; } } catch ( Exception e ) { throw new RuntimeException ( "Available formats are:\n" + String . join ( "\n" , getTypes ( ) ) + "\nSee also: https://www.elastic.co/guide/en/elasticsearch/reference/5.5/mapping-date-format.html#built-in-date-formats " + "and https://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html" ) ; } return input . toString ( ) ;
|
public class DocumentRootImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public void setBorderColor ( String newBorderColor ) { } }
|
String oldBorderColor = borderColor ; borderColor = newBorderColor ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , ColorPackage . DOCUMENT_ROOT__BORDER_COLOR , oldBorderColor , borderColor ) ) ;
|
public class DefaultExceptionResolver { /** * { @ inheritDoc } */
public Throwable resolveException ( ObjectNode response ) { } }
|
ObjectNode errorObject = ObjectNode . class . cast ( response . get ( JsonRpcBasicServer . ERROR ) ) ; if ( ! hasNonNullObjectData ( errorObject , JsonRpcBasicServer . DATA ) ) return createJsonRpcClientException ( errorObject ) ; ObjectNode dataObject = ObjectNode . class . cast ( errorObject . get ( JsonRpcBasicServer . DATA ) ) ; if ( ! hasNonNullTextualData ( dataObject , JsonRpcBasicServer . EXCEPTION_TYPE_NAME ) ) return createJsonRpcClientException ( errorObject ) ; try { String exceptionTypeName = dataObject . get ( JsonRpcBasicServer . EXCEPTION_TYPE_NAME ) . asText ( ) ; String message = hasNonNullTextualData ( dataObject , JsonRpcBasicServer . ERROR_MESSAGE ) ? dataObject . get ( JsonRpcBasicServer . ERROR_MESSAGE ) . asText ( ) : null ; return createThrowable ( exceptionTypeName , message ) ; } catch ( Exception e ) { logger . warn ( "Unable to create throwable" , e ) ; return createJsonRpcClientException ( errorObject ) ; }
|
public class DefuzzifierFactory { /** * Creates a Defuzzifier by executing the registered constructor
* @ param key is the unique name by which constructors are registered
* @ param resolution is the resolution of an IntegralDefuzzifier
* @ param type is the type of a WeightedDefuzzifier
* @ return a Defuzzifier by executing the registered constructor and setting
* its resolution or type accordingly */
public Defuzzifier constructDefuzzifier ( String key , int resolution , WeightedDefuzzifier . Type type ) { } }
|
Defuzzifier result = constructObject ( key ) ; if ( result instanceof IntegralDefuzzifier ) { ( ( IntegralDefuzzifier ) result ) . setResolution ( resolution ) ; } else if ( result instanceof WeightedDefuzzifier ) { ( ( WeightedDefuzzifier ) result ) . setType ( type ) ; } return result ;
|
public class ParsedOptions { /** * Return a version of this { @ code Option } converted to a particular type .
* @ param < T > type to attempt to convert to
* @ param option the option
* @ return the value parsed into a particular object
* @ throws OptionParserException if there are problems turning the option value into the desired type */
@ SuppressWarnings ( "unchecked" ) public < T > T getTypedValue ( Option option ) throws OptionParserException { } }
|
if ( option == null ) { return null ; } String value = getValue ( option ) ; if ( value == null ) { return null ; } OptionValueType valueType = option . getValueType ( ) ; if ( valueType == OptionValueType . STRING ) { return ( T ) value ; } else if ( valueType == OptionValueType . INT ) { try { return ( T ) Integer . valueOf ( value ) ; } catch ( NumberFormatException e ) { throw new OptionParserException ( e . getMessage ( ) ) ; } } else if ( valueType == OptionValueType . LONG ) { try { return ( T ) Long . valueOf ( value ) ; } catch ( NumberFormatException e ) { throw new OptionParserException ( e . getMessage ( ) ) ; } } else if ( valueType == OptionValueType . FLOAT ) { try { return ( T ) Float . valueOf ( value ) ; } catch ( NumberFormatException e ) { throw new OptionParserException ( e . getMessage ( ) ) ; } } else if ( valueType == OptionValueType . DOUBLE ) { try { return ( T ) Double . valueOf ( value ) ; } catch ( NumberFormatException e ) { throw new OptionParserException ( e . getMessage ( ) ) ; } } else if ( valueType == OptionValueType . BOOLEAN ) { try { return ( T ) Boolean . valueOf ( value ) ; } catch ( NumberFormatException e ) { throw new OptionParserException ( e . getMessage ( ) ) ; } } else if ( valueType == OptionValueType . FILE ) { return ( T ) new File ( value ) ; } else { return null ; }
|
public class ProcessEngineConfigurationImpl { public void initServices ( ) { } }
|
// Wires each public service facade into this engine configuration. What initService
// does to each service is defined elsewhere in this class — presumably injecting the
// command executor; TODO confirm at its definition.
initService ( repositoryService ) ; initService ( runtimeService ) ; initService ( historyService ) ; initService ( taskService ) ; initService ( managementService ) ; initService ( dynamicBpmnService ) ;
|
public class PowerOfTwoFileAllocator { /** * Remove from the tree .
* @ param x the item to remove . */
private Region remove ( Region x ) { } }
|
this . deletedNode = NULL_NODE ; this . root = remove ( x , this . root ) ; Region d = this . deletedElement ; // deletedElement is set to null to free the reference ,
// deletedNode is not freed as it will endup pointing to a valid node .
this . deletedElement = null ; if ( d == null ) { return null ; } else { return new Region ( d ) ; }
|
public class BigtableClusterUtilities { /** * Gets the current configuration of the cluster as encapsulated by a { @ link Cluster } object .
* @ param clusterId
* @ param zoneId
* @ return the { @ link Cluster } if it was set . If the cluster is not found , throw a { @ link
* NullPointerException } . */
public Cluster getCluster ( String clusterId , String zoneId ) { } }
|
Cluster response = null ; for ( Cluster cluster : getClusters ( ) . getClustersList ( ) ) { if ( cluster . getName ( ) . endsWith ( "/clusters/" + clusterId ) && cluster . getLocation ( ) . endsWith ( "/locations/" + zoneId ) ) { if ( response == null ) { response = cluster ; } else { throw new IllegalStateException ( String . format ( "Got multiple clusters named %s in zone %z." , clusterId , zoneId ) ) ; } } } return Preconditions . checkNotNull ( response , String . format ( "Cluster %s in zone %s was not found." , clusterId , zoneId ) ) ;
|
public class ClassScaner { /** * 通过过滤器 , 是否满足接受此类的条件
* @ param clazz 类
* @ return 是否接受 */
private void addIfAccept ( String className ) { } }
|
if ( StrUtil . isBlank ( className ) ) { return ; } int classLen = className . length ( ) ; int packageLen = this . packageName . length ( ) ; if ( classLen == packageLen ) { // 类名和包名长度一致 , 用户可能传入的包名是类名
if ( className . equals ( this . packageName ) ) { addIfAccept ( loadClass ( className ) ) ; } } else if ( classLen > packageLen ) { // 检查类名是否以指定包名为前缀 , 包名后加 . ( 避免类似于cn . hutool . A和cn . hutool . ATest这类类名引起的歧义 )
if ( className . startsWith ( this . packageNameWithDot ) ) { addIfAccept ( loadClass ( className ) ) ; } }
|
public class AnnotationTypeFieldBuilder { /** * Build the member documentation .
* @ param node the XML element that specifies which components to document
* @ param memberDetailsTree the content tree to which the documentation will be added
* @ throws DocletException if there is a problem while building the documentation */
public void buildAnnotationTypeMember ( XMLNode node , Content memberDetailsTree ) throws DocletException { } }
|
if ( writer == null ) { return ; } if ( hasMembersToDocument ( ) ) { writer . addAnnotationFieldDetailsMarker ( memberDetailsTree ) ; Element lastElement = members . get ( members . size ( ) - 1 ) ; for ( Element member : members ) { currentMember = member ; Content detailsTree = writer . getMemberTreeHeader ( ) ; writer . addAnnotationDetailsTreeHeader ( typeElement , detailsTree ) ; Content annotationDocTree = writer . getAnnotationDocTreeHeader ( currentMember , detailsTree ) ; buildChildren ( node , annotationDocTree ) ; detailsTree . addContent ( writer . getAnnotationDoc ( annotationDocTree , currentMember == lastElement ) ) ; memberDetailsTree . addContent ( writer . getAnnotationDetails ( detailsTree ) ) ; } }
|
public class CmsDbSynchronizationView { /** * Adds validators to the input fields . < p > */
protected void addValidators ( ) { } }
|
m_target . removeAllValidators ( ) ; m_target . addValidator ( new TargetValidator ( ) ) ; for ( Component c : m_componentsToValidate ) { if ( c instanceof CmsPathSelectField ) { ( ( CmsPathSelectField ) c ) . removeAllValidators ( ) ; ( ( CmsPathSelectField ) c ) . addValidator ( new ResourceValidator ( ) ) ; } }
|
public class WriterUtils { /** * Creates { @ link Path } for case { @ link WriterFilePathType # NAMESPACE _ TABLE } with configurations
* { @ link ConfigurationKeys # EXTRACT _ NAMESPACE _ NAME _ KEY } and { @ link ConfigurationKeys # EXTRACT _ TABLE _ NAME _ KEY }
* @ param state
* @ return a path */
public static Path getNamespaceTableWriterFilePath ( State state ) { } }
|
Preconditions . checkArgument ( state . contains ( ConfigurationKeys . EXTRACT_NAMESPACE_NAME_KEY ) ) ; Preconditions . checkArgument ( state . contains ( ConfigurationKeys . EXTRACT_TABLE_NAME_KEY ) ) ; String namespace = state . getProp ( ConfigurationKeys . EXTRACT_NAMESPACE_NAME_KEY ) . replaceAll ( "\\." , Path . SEPARATOR ) ; return new Path ( namespace + Path . SEPARATOR + state . getProp ( ConfigurationKeys . EXTRACT_TABLE_NAME_KEY ) ) ;
|
public class GosuStringUtil {
    /**
     * <p>Search a String to find the first index of any
     * character not in the given set of characters.</p>
     *
     * <p>A <code>null</code> String will return <code>-1</code>.
     * A <code>null</code> or empty search string will return <code>-1</code>.</p>
     *
     * <pre>
     * GosuStringUtil.indexOfAnyBut(null, *)            = -1
     * GosuStringUtil.indexOfAnyBut("", *)              = -1
     * GosuStringUtil.indexOfAnyBut(*, null)            = -1
     * GosuStringUtil.indexOfAnyBut(*, "")              = -1
     * GosuStringUtil.indexOfAnyBut("zzabyycdxx", "za") = 3
     * GosuStringUtil.indexOfAnyBut("zzabyycdxx", "")   = -1
     * GosuStringUtil.indexOfAnyBut("aba", "ab")        = -1
     * </pre>
     *
     * @param str the String to check, may be null
     * @param searchChars the chars to search for, may be null
     * @return the index of any of the chars, -1 if no match or null input
     * @since 2.0
     */
    public static int indexOfAnyBut(String str, String searchChars) {
        // Fix: the old javadoc example claimed indexOfAnyBut("zzabyycdxx", "") == 0,
        // contradicting both the guard below and the "(*, \"\") = -1" example; the
        // documentation now matches the actual behavior. The empty/null guard is
        // written out inline so this method is self-contained.
        if (str == null || str.length() == 0 || searchChars == null || searchChars.length() == 0) {
            return -1;
        }
        for (int i = 0; i < str.length(); i++) {
            if (searchChars.indexOf(str.charAt(i)) < 0) {
                return i;
            }
        }
        return -1;
    }
}
|
public class BpsimFactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public Object createFromString ( EDataType eDataType , String initialValue ) { } }
|
switch ( eDataType . getClassifierID ( ) ) { case BpsimPackage . RESULT_TYPE : return createResultTypeFromString ( eDataType , initialValue ) ; case BpsimPackage . TIME_UNIT : return createTimeUnitFromString ( eDataType , initialValue ) ; case BpsimPackage . RESULT_TYPE_OBJECT : return createResultTypeObjectFromString ( eDataType , initialValue ) ; case BpsimPackage . TIME_UNIT_OBJECT : return createTimeUnitObjectFromString ( eDataType , initialValue ) ; default : throw new IllegalArgumentException ( "The datatype '" + eDataType . getName ( ) + "' is not a valid classifier" ) ; }
|
public class QuickResultMatcherBase { /** * { @ inheritDoc } */
@ Override public void describeMismatch ( Object item , Description description ) { } }
|
MatchResult . Mismatch < ? > mismatch = matchResult ( item ) . getMismatch ( ) ; if ( mismatch != null ) { mismatch . describeMismatch ( description ) ; }
|
public class BytesBufferHandler { /** * { @ inheritDoc } */
@ Override public int readData ( final ByteBuffer target ) { } }
|
final int toRead = Math . min ( target . remaining ( ) , ( int ) input . readRemaining ( ) ) ; for ( int i = 0 ; i < toRead ; i ++ ) { target . put ( input . readByte ( ) ) ; } return toRead ;
|
public class InternalPureXbaseParser { /** * InternalPureXbase . g : 500:1 : ruleInitialisedVariableDeclaration returns [ EObject current = null ] : ( ( ) ( ( ( lv _ writeable _ 1_0 = ' var ' ) ) | otherlv _ 2 = ' val ' ) ( ( ( ( ( ( ruleJvmTypeReference ) ) ( ( ruleValidID ) ) ) ) = > ( ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) ) ( ( lv _ name _ 4_0 = ruleValidID ) ) ) ) | ( ( lv _ name _ 5_0 = ruleValidID ) ) ) otherlv _ 6 = ' = ' ( ( lv _ right _ 7_0 = ruleXExpression ) ) ) ; */
public final EObject ruleInitialisedVariableDeclaration ( ) throws RecognitionException { } }
|
// NOTE(review): ANTLR-generated parser rule for "var/val [type] name = expression".
// Kept byte-identical - generated code must match the grammar; regenerate from
// InternalPureXbase.g rather than editing by hand.
EObject current = null ; Token lv_writeable_1_0 = null ; Token otherlv_2 = null ; Token otherlv_6 = null ; EObject lv_type_3_0 = null ; AntlrDatatypeRuleToken lv_name_4_0 = null ; AntlrDatatypeRuleToken lv_name_5_0 = null ; EObject lv_right_7_0 = null ; enterRule ( ) ; try { // InternalPureXbase . g : 506:2 : ( ( ( ) ( ( ( lv _ writeable _ 1_0 = ' var ' ) ) | otherlv _ 2 = ' val ' ) ( ( ( ( ( ( ruleJvmTypeReference ) ) ( ( ruleValidID ) ) ) ) = > ( ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) ) ( ( lv _ name _ 4_0 = ruleValidID ) ) ) ) | ( ( lv _ name _ 5_0 = ruleValidID ) ) ) otherlv _ 6 = ' = ' ( ( lv _ right _ 7_0 = ruleXExpression ) ) ) )
// InternalPureXbase . g : 507:2 : ( ( ) ( ( ( lv _ writeable _ 1_0 = ' var ' ) ) | otherlv _ 2 = ' val ' ) ( ( ( ( ( ( ruleJvmTypeReference ) ) ( ( ruleValidID ) ) ) ) = > ( ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) ) ( ( lv _ name _ 4_0 = ruleValidID ) ) ) ) | ( ( lv _ name _ 5_0 = ruleValidID ) ) ) otherlv _ 6 = ' = ' ( ( lv _ right _ 7_0 = ruleXExpression ) ) )
{ // InternalPureXbase . g : 507:2 : ( ( ) ( ( ( lv _ writeable _ 1_0 = ' var ' ) ) | otherlv _ 2 = ' val ' ) ( ( ( ( ( ( ruleJvmTypeReference ) ) ( ( ruleValidID ) ) ) ) = > ( ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) ) ( ( lv _ name _ 4_0 = ruleValidID ) ) ) ) | ( ( lv _ name _ 5_0 = ruleValidID ) ) ) otherlv _ 6 = ' = ' ( ( lv _ right _ 7_0 = ruleXExpression ) ) )
// InternalPureXbase . g : 508:3 : ( ) ( ( ( lv _ writeable _ 1_0 = ' var ' ) ) | otherlv _ 2 = ' val ' ) ( ( ( ( ( ( ruleJvmTypeReference ) ) ( ( ruleValidID ) ) ) ) = > ( ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) ) ( ( lv _ name _ 4_0 = ruleValidID ) ) ) ) | ( ( lv _ name _ 5_0 = ruleValidID ) ) ) otherlv _ 6 = ' = ' ( ( lv _ right _ 7_0 = ruleXExpression ) )
{ // InternalPureXbase . g : 508:3 : ( )
// InternalPureXbase . g : 509:4:
{ if ( state . backtracking == 0 ) { current = forceCreateModelElement ( grammarAccess . getInitialisedVariableDeclarationAccess ( ) . getXVariableDeclarationAction_0 ( ) , current ) ; } } // InternalPureXbase . g : 515:3 : ( ( ( lv _ writeable _ 1_0 = ' var ' ) ) | otherlv _ 2 = ' val ' )
int alt14 = 2 ; int LA14_0 = input . LA ( 1 ) ; if ( ( LA14_0 == 18 ) ) { alt14 = 1 ; } else if ( ( LA14_0 == 19 ) ) { alt14 = 2 ; } else { if ( state . backtracking > 0 ) { state . failed = true ; return current ; } NoViableAltException nvae = new NoViableAltException ( "" , 14 , 0 , input ) ; throw nvae ; } switch ( alt14 ) { case 1 : // InternalPureXbase . g : 516:4 : ( ( lv _ writeable _ 1_0 = ' var ' ) )
{ // InternalPureXbase . g : 516:4 : ( ( lv _ writeable _ 1_0 = ' var ' ) )
// InternalPureXbase . g : 517:5 : ( lv _ writeable _ 1_0 = ' var ' )
{ // InternalPureXbase . g : 517:5 : ( lv _ writeable _ 1_0 = ' var ' )
// InternalPureXbase . g : 518:6 : lv _ writeable _ 1_0 = ' var '
{ lv_writeable_1_0 = ( Token ) match ( input , 18 , FOLLOW_11 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( lv_writeable_1_0 , grammarAccess . getInitialisedVariableDeclarationAccess ( ) . getWriteableVarKeyword_1_0_0 ( ) ) ; } if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElement ( grammarAccess . getInitialisedVariableDeclarationRule ( ) ) ; } setWithLastConsumed ( current , "writeable" , true , "var" ) ; } } } } break ; case 2 : // InternalPureXbase . g : 531:4 : otherlv _ 2 = ' val '
{ otherlv_2 = ( Token ) match ( input , 19 , FOLLOW_11 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_2 , grammarAccess . getInitialisedVariableDeclarationAccess ( ) . getValKeyword_1_1 ( ) ) ; } } break ; } // InternalPureXbase . g : 536:3 : ( ( ( ( ( ( ruleJvmTypeReference ) ) ( ( ruleValidID ) ) ) ) = > ( ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) ) ( ( lv _ name _ 4_0 = ruleValidID ) ) ) ) | ( ( lv _ name _ 5_0 = ruleValidID ) ) )
int alt15 = 2 ; int LA15_0 = input . LA ( 1 ) ; if ( ( LA15_0 == RULE_ID ) ) { int LA15_1 = input . LA ( 2 ) ; if ( ( LA15_1 == 20 ) ) { alt15 = 2 ; } else if ( ( LA15_1 == 54 ) && ( synpred6_InternalPureXbase ( ) ) ) { alt15 = 1 ; } else if ( ( LA15_1 == 28 ) && ( synpred6_InternalPureXbase ( ) ) ) { alt15 = 1 ; } else if ( ( LA15_1 == 61 ) && ( synpred6_InternalPureXbase ( ) ) ) { alt15 = 1 ; } else if ( ( LA15_1 == RULE_ID ) && ( synpred6_InternalPureXbase ( ) ) ) { alt15 = 1 ; } else { if ( state . backtracking > 0 ) { state . failed = true ; return current ; } NoViableAltException nvae = new NoViableAltException ( "" , 15 , 1 , input ) ; throw nvae ; } } else if ( ( LA15_0 == 15 ) && ( synpred6_InternalPureXbase ( ) ) ) { alt15 = 1 ; } else if ( ( LA15_0 == 41 ) && ( synpred6_InternalPureXbase ( ) ) ) { alt15 = 1 ; } else { if ( state . backtracking > 0 ) { state . failed = true ; return current ; } NoViableAltException nvae = new NoViableAltException ( "" , 15 , 0 , input ) ; throw nvae ; } switch ( alt15 ) { case 1 : // InternalPureXbase . g : 537:4 : ( ( ( ( ( ruleJvmTypeReference ) ) ( ( ruleValidID ) ) ) ) = > ( ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) ) ( ( lv _ name _ 4_0 = ruleValidID ) ) ) )
{ // InternalPureXbase . g : 537:4 : ( ( ( ( ( ruleJvmTypeReference ) ) ( ( ruleValidID ) ) ) ) = > ( ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) ) ( ( lv _ name _ 4_0 = ruleValidID ) ) ) )
// InternalPureXbase . g : 538:5 : ( ( ( ( ruleJvmTypeReference ) ) ( ( ruleValidID ) ) ) ) = > ( ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) ) ( ( lv _ name _ 4_0 = ruleValidID ) ) )
{ // InternalPureXbase . g : 551:5 : ( ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) ) ( ( lv _ name _ 4_0 = ruleValidID ) ) )
// InternalPureXbase . g : 552:6 : ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) ) ( ( lv _ name _ 4_0 = ruleValidID ) )
{ // InternalPureXbase . g : 552:6 : ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) )
// InternalPureXbase . g : 553:7 : ( lv _ type _ 3_0 = ruleJvmTypeReference )
{ // InternalPureXbase . g : 553:7 : ( lv _ type _ 3_0 = ruleJvmTypeReference )
// InternalPureXbase . g : 554:8 : lv _ type _ 3_0 = ruleJvmTypeReference
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getInitialisedVariableDeclarationAccess ( ) . getTypeJvmTypeReferenceParserRuleCall_2_0_0_0_0 ( ) ) ; } pushFollow ( FOLLOW_12 ) ; lv_type_3_0 = ruleJvmTypeReference ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getInitialisedVariableDeclarationRule ( ) ) ; } set ( current , "type" , lv_type_3_0 , "org.eclipse.xtext.xbase.Xtype.JvmTypeReference" ) ; afterParserOrEnumRuleCall ( ) ; } } } // InternalPureXbase . g : 571:6 : ( ( lv _ name _ 4_0 = ruleValidID ) )
// InternalPureXbase . g : 572:7 : ( lv _ name _ 4_0 = ruleValidID )
{ // InternalPureXbase . g : 572:7 : ( lv _ name _ 4_0 = ruleValidID )
// InternalPureXbase . g : 573:8 : lv _ name _ 4_0 = ruleValidID
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getInitialisedVariableDeclarationAccess ( ) . getNameValidIDParserRuleCall_2_0_0_1_0 ( ) ) ; } pushFollow ( FOLLOW_13 ) ; lv_name_4_0 = ruleValidID ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getInitialisedVariableDeclarationRule ( ) ) ; } set ( current , "name" , lv_name_4_0 , "org.eclipse.xtext.xbase.Xtype.ValidID" ) ; afterParserOrEnumRuleCall ( ) ; } } } } } } break ; case 2 : // InternalPureXbase . g : 593:4 : ( ( lv _ name _ 5_0 = ruleValidID ) )
{ // InternalPureXbase . g : 593:4 : ( ( lv _ name _ 5_0 = ruleValidID ) )
// InternalPureXbase . g : 594:5 : ( lv _ name _ 5_0 = ruleValidID )
{ // InternalPureXbase . g : 594:5 : ( lv _ name _ 5_0 = ruleValidID )
// InternalPureXbase . g : 595:6 : lv _ name _ 5_0 = ruleValidID
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getInitialisedVariableDeclarationAccess ( ) . getNameValidIDParserRuleCall_2_1_0 ( ) ) ; } pushFollow ( FOLLOW_13 ) ; lv_name_5_0 = ruleValidID ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getInitialisedVariableDeclarationRule ( ) ) ; } set ( current , "name" , lv_name_5_0 , "org.eclipse.xtext.xbase.Xtype.ValidID" ) ; afterParserOrEnumRuleCall ( ) ; } } } } break ; } otherlv_6 = ( Token ) match ( input , 20 , FOLLOW_3 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_6 , grammarAccess . getInitialisedVariableDeclarationAccess ( ) . getEqualsSignKeyword_3 ( ) ) ; } // InternalPureXbase . g : 617:3 : ( ( lv _ right _ 7_0 = ruleXExpression ) )
// InternalPureXbase . g : 618:4 : ( lv _ right _ 7_0 = ruleXExpression )
{ // InternalPureXbase . g : 618:4 : ( lv _ right _ 7_0 = ruleXExpression )
// InternalPureXbase . g : 619:5 : lv _ right _ 7_0 = ruleXExpression
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getInitialisedVariableDeclarationAccess ( ) . getRightXExpressionParserRuleCall_4_0 ( ) ) ; } pushFollow ( FOLLOW_2 ) ; lv_right_7_0 = ruleXExpression ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getInitialisedVariableDeclarationRule ( ) ) ; } set ( current , "right" , lv_right_7_0 , "org.eclipse.xtext.xbase.Xbase.XExpression" ) ; afterParserOrEnumRuleCall ( ) ; } } } } } if ( state . backtracking == 0 ) { leaveRule ( ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
|
public class URLCoder {
    /**
     * Encodes Java string into {@code x-www-form-urlencoded} format using UTF-8.
     *
     * @param plain input value
     * @return encoded value
     * @see URLEncoder#encode(String, String)
     */
    public static String encode(String plain) {
        try {
            return URLEncoder.encode(plain, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            // Fix: the old message referenced the ENCODING_FOR_URL constant while the
            // call above hard-codes "UTF-8"; the message now names the charset actually
            // used. Every JVM must support UTF-8, so this branch is effectively unreachable.
            throw new IllegalStateException("Unable to encode URL entry via UTF-8. This should not happen", e);
        }
    }
}
|
public class MediaWikiApiImpl { /** * normalize the given page title
* @ param title
* @ return the normalized title e . g . replacing blanks FIXME encode is not good
* enough
* @ throws Exception */
public String normalizeTitle ( String title ) throws Exception { } }
|
String result = encode ( title ) ; result = result . replace ( "+" , "_" ) ; return result ;
|
public class CommerceCurrencyPersistenceImpl { /** * Removes all the commerce currencies where groupId = & # 63 ; from the database .
* @ param groupId the group ID */
@ Override public void removeByGroupId ( long groupId ) { } }
|
for ( CommerceCurrency commerceCurrency : findByGroupId ( groupId , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ) { remove ( commerceCurrency ) ; }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.