signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class SplitDataProperties { /** * Defines that the data within an input split is grouped on the fields defined by the field positions . * All records sharing the same key ( combination ) must be subsequently emitted by the input * format for each input split . * < p > < b > * IMPORTANT : Providing wrong information with SplitDataProperties can cause wrong results ! * @ param groupFields The field positions of the grouping keys . * @ return This SplitDataProperties object . */ public SplitDataProperties < T > splitsGroupedBy ( int ... groupFields ) { } }
if ( groupFields == null ) { throw new InvalidProgramException ( "GroupFields may not be null." ) ; } else if ( groupFields . length == 0 ) { throw new InvalidProgramException ( "GroupFields may not be empty." ) ; } if ( this . splitOrdering != null ) { throw new InvalidProgramException ( "DataSource may either be grouped or sorted." ) ; } this . splitGroupKeys = getAllFlatKeys ( groupFields ) ; return this ;
public class GenericAnalyticsRequest {

    /**
     * Create a {@link GenericAnalyticsRequest} and mark it as containing a full Analytics
     * query in Json form (including additional query parameters).
     *
     * The simplest form of such a query is a single statement encapsulated in a json query
     * object: <pre>{"statement":"SELECT * FROM default"}</pre>.
     *
     * @param jsonQuery the Analytics query in json form.
     * @param bucket the bucket on which to perform the query.
     * @param username the username authorized for bucket access.
     * @param password the password for the user.
     * @param targetNode the node on which to execute this request (or null to let the core locate and choose one).
     * @return a {@link GenericAnalyticsRequest} for this full query.
     */
    public static GenericAnalyticsRequest jsonQuery(String jsonQuery, String bucket, String username, String password, InetAddress targetNode) {
        // The boolean flags the payload as a full json query; NO_PRIORITY leaves the
        // request at the default priority — presumably the scheduler default. TODO confirm.
        return new GenericAnalyticsRequest(jsonQuery, true, bucket, username, password, targetNode, NO_PRIORITY);
    }
}
public class FxRateImpl { /** * Nice human readable description of the FX Rate , useful reminder . * < pre > * CAD . SGD Mkt Convention : true * Quoter buys CAD and sells SGD at 1.1279 * Quoter sells CAD and buys SGD at 1.1289 * < / pre > */ @ Override public String getDescription ( ) { } }
final StringBuilder b = new StringBuilder ( ) ; b . append ( currencyPair ) . append ( " Mkt Convention:" ) . append ( marketConvention ) ; if ( crossCcy != null ) { b . append ( " Cross Ccy:" ) . append ( crossCcy ) ; } final String cr = System . getProperty ( "line.separator" ) ; b . append ( cr ) ; b . append ( "Quoter buys " ) . append ( currencyPair . getCcy1 ( ) ) . append ( " and sells " ) . append ( currencyPair . getCcy2 ( ) ) . append ( " at " ) . append ( bid ) . append ( cr ) ; b . append ( "Quoter sells " ) . append ( currencyPair . getCcy1 ( ) ) . append ( " and buys " ) . append ( currencyPair . getCcy2 ( ) ) . append ( " at " ) . append ( ask ) ; return b . toString ( ) ;
public class Gravatar { /** * Set the default image which will be retrieved when there is no avatar * for the given email , when the avatar can ' t be shown due to the rating * or when you enforce the default avatar . * @ param customDefaultImage Absolute URL to an image . * @ param encoding customDefaultImage ' s ( first parameter ) encoding * @ return Fluent interface * @ see java . net . URLEncoder # encode ( String , String ) */ public Gravatar setCustomDefaultImage ( String customDefaultImage , String encoding ) throws UnsupportedEncodingException { } }
assert customDefaultImage != null && encoding != null ; this . customDefaultImage = URLEncoder . encode ( customDefaultImage , encoding ) ; standardDefaultImage = null ; return this ;
public class TrainingSpecification { /** * A list of the metrics that the algorithm emits that can be used as the objective metric in a hyperparameter * tuning job . * @ param supportedTuningJobObjectiveMetrics * A list of the metrics that the algorithm emits that can be used as the objective metric in a * hyperparameter tuning job . */ public void setSupportedTuningJobObjectiveMetrics ( java . util . Collection < HyperParameterTuningJobObjective > supportedTuningJobObjectiveMetrics ) { } }
if ( supportedTuningJobObjectiveMetrics == null ) { this . supportedTuningJobObjectiveMetrics = null ; return ; } this . supportedTuningJobObjectiveMetrics = new java . util . ArrayList < HyperParameterTuningJobObjective > ( supportedTuningJobObjectiveMetrics ) ;
public class ProcessorTemplateHandler {

    /**
     * This method will be called for any event that arrives from a previous handler in the
     * chain (or the parser, cache...) when the execution has already been stopped and we
     * (potentially) have some work pending. The idea is to queue these events at the end of
     * the pending queue (i.e. at level 0) so that they are processed normally once all
     * pending work has been processed too.
     *
     * Given the cache/parser are immediately stopped once we receive a stop signal, this can
     * only happen if a pre-processor sits in the middle and produces several "sister" events
     * to the one whose handling was originally stopped.
     *
     * Also note events used here should always come from previous handlers and never from
     * the execution of pending work itself, given all pending-work structures (i.e. all
     * implementations of IEngineProcessable) should check the "stopProcessing" flag before
     * executing each event, so they should never produce additional pending events that
     * would potentially (and erroneously) be queued at level 0.
     *
     * @param event the template event to queue at level 0 of the pending queue.
     */
    private void queueEvent(final ITemplateEvent event) {
        final SimpleModelProcessable pendingProcessableModel;
        if (this.pendingProcessingsSize > 0) {
            final IEngineProcessable level0Pending = this.pendingProcessings[0];
            if (level0Pending instanceof SimpleModelProcessable
                    && ((SimpleModelProcessable) level0Pending).getModelHandler() == this) {
                // Level 0 already holds a queue model owned by this handler — reuse it.
                pendingProcessableModel = (SimpleModelProcessable) level0Pending;
            } else {
                // Level 0 is occupied by other pending work: create a fresh queue model and
                // shift all existing pending entries one slot up so it sits at level 0.
                final Model model = new Model(this.configuration, this.templateMode);
                pendingProcessableModel = new SimpleModelProcessable(model, this, this.flowController);
                ensurePendingCapacity();
                System.arraycopy(this.pendingProcessings, 0, this.pendingProcessings, 1, this.pendingProcessingsSize);
                this.pendingProcessings[0] = pendingProcessableModel;
                this.pendingProcessingsSize++;
            }
        } else {
            // No pending work at all: install a fresh queue model at level 0.
            final Model model = new Model(this.configuration, this.templateMode);
            pendingProcessableModel = new SimpleModelProcessable(model, this, this.flowController);
            this.pendingProcessings[0] = pendingProcessableModel;
            this.pendingProcessingsSize++;
            ensurePendingCapacity();
        }
        // Append the event to the level-0 model and flag that pending work now exists.
        pendingProcessableModel.getModel().add(event);
        this.flowController.processorTemplateHandlerPending = true;
    }
}
public class FeatureInfoWidgetFactory { /** * Create a default { @ link FeatureInfoWidget } with a { @ link ZoomToObjectAction } * to allow zooming to selected features . * @ param mapPresenter The map presenter used by the action ( s ) . * @ return A feature info widget with actions . */ public FeatureInfoWithActions getFeatureInfoWidgetWithActions ( MapPresenter mapPresenter ) { } }
FeatureInfoWithActions widgetWithActions = new FeatureInfoWithActions ( ) ; widgetWithActions . addHasFeature ( new ZoomToObjectAction ( mapPresenter ) ) ; return widgetWithActions ;
public class ImageModerationsImpl { /** * Returns probabilities of the image containing racy or adult content . * @ param imageStream The image file . * @ param evaluateFileInputOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the Evaluate object */ public Observable < ServiceResponse < Evaluate > > evaluateFileInputWithServiceResponseAsync ( byte [ ] imageStream , EvaluateFileInputOptionalParameter evaluateFileInputOptionalParameter ) { } }
if ( this . client . baseUrl ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.baseUrl() is required and cannot be null." ) ; } if ( imageStream == null ) { throw new IllegalArgumentException ( "Parameter imageStream is required and cannot be null." ) ; } final Boolean cacheImage = evaluateFileInputOptionalParameter != null ? evaluateFileInputOptionalParameter . cacheImage ( ) : null ; return evaluateFileInputWithServiceResponseAsync ( imageStream , cacheImage ) ;
public class WhitesourceService {

    /**
     * Checks the policies application of the given OSS information.
     *
     * @param orgToken Organization token uniquely identifying the account at white source.
     * @param product The product name or token to update.
     * @param productVersion The product version.
     * @param projectInfos OSS usage information to send to white source.
     * @param forceCheckAllDependencies Boolean to check new data only or not.
     * @param userKey user key uniquely identifying the account at white source.
     * @param requesterEmail Email of the WhiteSource user that requests to update WhiteSource.
     * @param logData list of FSA's log data events.
     * @param productToken The product token.
     * @return Potential result of applying the currently defined policies.
     * @throws WssServiceException In case of errors while checking the policies with white source.
     * @deprecated retained for backward compatibility.
     */
    @Deprecated
    public CheckPolicyComplianceResult checkPolicyCompliance(String orgToken, String product, String productVersion, Collection<AgentProjectInfo> projectInfos, boolean forceCheckAllDependencies, String userKey, String requesterEmail, String logData, String productToken) throws WssServiceException {
        // Thin facade: build the request through the factory and forward it to the client.
        return client.checkPolicyCompliance(
                requestFactory.newCheckPolicyComplianceRequest(orgToken, product, productVersion, projectInfos,
                        forceCheckAllDependencies, userKey, requesterEmail, logData, productToken));
    }
}
public class QueryAtomContainerCreator {

    /**
     * Creates a QueryAtomContainer with the following settings:
     * <pre>
     * QueryAtomContainer.create(container,
     *                           Expr.Type.ELEMENT,
     *                           Expr.Type.IS_AROMATIC,
     *                           Expr.Type.ALIPHATIC_ORDER);
     * </pre>
     *
     * NOTE(review): the method name mentions "AnyAtom"/"PseudoAtom" but the delegation
     * matches atoms on ELEMENT — confirm the name still reflects the intended semantics.
     *
     * @param container The AtomContainer that stands as model
     * @return The new QueryAtomContainer created from container.
     */
    public static QueryAtomContainer createAnyAtomForPseudoAtomQueryContainer(IAtomContainer container) {
        return QueryAtomContainer.create(container, Expr.Type.ELEMENT, Expr.Type.IS_AROMATIC, Expr.Type.ALIPHATIC_ORDER);
    }
}
public class VimGenerator2 {

    /**
     * Append a Vim comment.
     *
     * @param it the receiver of the generated elements.
     * @param text the text of the comment.
     * @return {@code it}.
     */
    protected IStyleAppendable appendComment(IStyleAppendable it, String text) {
        // Delegates to the three-argument overload; the boolean's meaning is not visible
        // here — presumably enables a default comment prefix. TODO confirm against overload.
        return appendComment(it, true, text);
    }
}
public class BigtableTableAdminGCJClient {

    /**
     * {@inheritDoc}
     *
     * Drops all rows whose key starts with {@code rowKeyPrefix} in the given table by
     * delegating directly to the wrapped admin client.
     */
    @Override
    public void dropRowRange(String tableId, String rowKeyPrefix) {
        delegate.dropRowRange(tableId, rowKeyPrefix);
    }
}
public class ProtoParser {

    /**
     * Adds an object or objects to a List: a List value is flattened (all of its
     * elements are appended), any other value is appended as a single element.
     *
     * @param list  the destination list, modified in place
     * @param value a single element, or a List whose elements are all appended
     */
    private void addToList(List<Object> list, Object value) {
        if (value instanceof List) {
            // Use a bounded wildcard instead of the raw (List) cast: same runtime
            // behavior, but no unchecked-conversion warning.
            list.addAll((List<?>) value);
        } else {
            list.add(value);
        }
    }
}
public class CreateProductPackageItems { /** * Runs the example . * @ param adManagerServices the services factory . * @ param session the session . * @ param productPackageId the ID of the product package to add product package items to . * @ param productId the ID of the product to generate a product package item from . * @ throws ApiException if the API request failed with one or more service errors . * @ throws RemoteException if the API request failed due to other errors . */ public static void runExample ( AdManagerServices adManagerServices , AdManagerSession session , long productPackageId , long productId ) throws RemoteException { } }
ProductPackageItemServiceInterface productPackageItemService = adManagerServices . get ( session , ProductPackageItemServiceInterface . class ) ; // Create a local product package item . ProductPackageItem productPackageItem = new ProductPackageItem ( ) ; // Set the product from which the product package item is created . productPackageItem . setProductId ( productId ) ; // Set the product package that the product package item belongs to . productPackageItem . setProductPackageId ( productPackageId ) ; // Specify if the product package item is required for this product package . productPackageItem . setIsMandatory ( true ) ; // Create the product package item on the server . ProductPackageItem [ ] productPackageItems = productPackageItemService . createProductPackageItems ( new ProductPackageItem [ ] { productPackageItem } ) ; for ( ProductPackageItem createdProductPackageItem : productPackageItems ) { System . out . printf ( "A product package item with ID %d created from " + "product ID %d belonging to " + "product package with ID %d was created.%n" , createdProductPackageItem . getId ( ) , createdProductPackageItem . getProductId ( ) , createdProductPackageItem . getProductPackageId ( ) ) ; }
public class LineNumberFormatter {

    /**
     * Processes {@link #_file}, breaking apart any lines on which multiple line-number
     * markers appear in different columns.
     *
     * @param o_LineBrokenPositions output parameter: receives one position per marker,
     *        with emitted line numbers adjusted for the lines inserted by the breaking.
     * @return the list of broken lines
     * @throws IOException if reading {@link #_file} fails
     */
    private List<String> breakLines(List<LineNumberPosition> o_LineBrokenPositions) throws IOException {
        int numLinesRead = 0;
        // Number of extra lines introduced so far by splitting; added to emitted line numbers.
        int lineOffset = 0;
        List<String> brokenLines = new ArrayList<>();
        try (BufferedReader r = new BufferedReader(new FileReader(_file))) {
            for (int posIndex = 0; posIndex < _positions.size(); posIndex++) {
                LineNumberPosition pos = _positions.get(posIndex);
                o_LineBrokenPositions.add(new LineNumberPosition(
                        pos.getOriginalLine(), pos.getEmittedLine() + lineOffset, pos.getEmittedColumn()));
                // Copy the input file up to but not including the emitted line # in "pos".
                while (numLinesRead < pos.getEmittedLine() - 1) {
                    brokenLines.add(r.readLine());
                    numLinesRead++;
                }
                // Read the line that contains the next line-number annotations, but don't write it yet.
                String line = r.readLine();
                numLinesRead++;
                // See if there are two original line annotations on the same emitted line.
                LineNumberPosition nextPos;
                int prevPartLen = 0; // total length of parts already split off this line
                char[] indent = {};  // whitespace prefix keeping columns aligned after a split
                do {
                    nextPos = (posIndex < _positions.size() - 1) ? _positions.get(posIndex + 1) : null;
                    if (nextPos != null
                            && nextPos.getEmittedLine() == pos.getEmittedLine()
                            && nextPos.getOriginalLine() > pos.getOriginalLine()) {
                        // Two different source line numbers on the same emitted line!
                        // Consume the next position and split the emitted line at its column.
                        posIndex++;
                        lineOffset++;
                        String firstPart = line.substring(0, nextPos.getEmittedColumn() - prevPartLen - 1);
                        brokenLines.add(new String(indent) + firstPart);
                        prevPartLen += firstPart.length();
                        indent = new char[prevPartLen];
                        Arrays.fill(indent, ' ');
                        line = line.substring(firstPart.length(), line.length());
                        // Alter the position while adding it (account for the inserted line).
                        o_LineBrokenPositions.add(new LineNumberPosition(
                                nextPos.getOriginalLine(), nextPos.getEmittedLine() + lineOffset, nextPos.getEmittedColumn()));
                    } else {
                        nextPos = null; // terminate the splitting loop
                    }
                } while (nextPos != null);
                // Nothing special here -- just emit the (remainder of the) line.
                brokenLines.add(new String(indent) + line);
            }
            // Copy out the remainder of the file.
            String line;
            while ((line = r.readLine()) != null) {
                brokenLines.add(line);
            }
        }
        return brokenLines;
    }
}
public class MinioClient {

    /**
     * Lists object information in given bucket and prefix.
     *
     * @param bucketName Bucket name.
     * @param prefix Prefix string. List objects whose name starts with `prefix`.
     * @return an iterator of Result Items.
     * @throws XmlPullParserException upon parsing response xml
     */
    public Iterable<Result<Item>> listObjects(final String bucketName, final String prefix) throws XmlPullParserException {
        // list all objects recursively (the boolean enables recursion in the overload)
        return listObjects(bucketName, prefix, true);
    }
}
public class NumberFormatContext {

    /**
     * Check whether the given value is a valid numeric value that can be
     * converted to an {@link Double}.
     *
     * @param value The associated value to test; {@code null} is treated as invalid.
     * @return <code>true</code> if the value is a valid double,
     *         <code>false</code> otherwise
     * @see Double#valueOf(String)
     */
    public boolean isDouble(String value) {
        // Double.valueOf(null) throws NullPointerException, not NumberFormatException,
        // so a null input would previously have crashed instead of returning false.
        if (value == null) {
            return false;
        }
        try {
            Double.valueOf(value);
            return true;
        } catch (NumberFormatException e) {
            return false;
        }
    }
}
public class HttpStaticFileServerHandler { /** * If Keep - Alive is disabled , attaches " Connection : close " header to the response * and closes the connection after the response being sent . */ private void sendAndCleanupConnection ( ChannelHandlerContext ctx , FullHttpResponse response ) { } }
final FullHttpRequest request = this . request ; final boolean keepAlive = HttpUtil . isKeepAlive ( request ) ; HttpUtil . setContentLength ( response , response . content ( ) . readableBytes ( ) ) ; if ( ! keepAlive ) { // We ' re going to close the connection as soon as the response is sent , // so we should also make it clear for the client . response . headers ( ) . set ( HttpHeaderNames . CONNECTION , HttpHeaderValues . CLOSE ) ; } else if ( request . protocolVersion ( ) . equals ( HTTP_1_0 ) ) { response . headers ( ) . set ( HttpHeaderNames . CONNECTION , HttpHeaderValues . KEEP_ALIVE ) ; } ChannelFuture flushPromise = ctx . writeAndFlush ( response ) ; if ( ! keepAlive ) { // Close the connection as soon as the response is sent . flushPromise . addListener ( ChannelFutureListener . CLOSE ) ; }
public class SaverDef {

    /**
     * <pre>
     * The operation to run when saving a model checkpoint.
     * </pre>
     *
     * <code>optional string save_tensor_name = 2;</code>
     *
     * @return the field value as a UTF-8 {@link com.google.protobuf.ByteString}.
     */
    public com.google.protobuf.ByteString getSaveTensorNameBytes() {
        java.lang.Object ref = saveTensorName_;
        if (ref instanceof java.lang.String) {
            // Lazily convert the cached String to bytes and memoize the ByteString so
            // subsequent calls return it without re-encoding.
            com.google.protobuf.ByteString b =
                    com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
            saveTensorName_ = b;
            return b;
        } else {
            // Field already stored in byte form.
            return (com.google.protobuf.ByteString) ref;
        }
    }
}
public class CmisConnector { /** * Determines supertypes for the given CMIS type in terms of JCR . * @ param cmisType given CMIS type * @ return supertypes in JCR lexicon . */ private String [ ] superTypes ( ObjectType cmisType ) { } }
if ( cmisType . getBaseTypeId ( ) == BaseTypeId . CMIS_FOLDER ) { return new String [ ] { JcrConstants . NT_FOLDER } ; } if ( cmisType . getBaseTypeId ( ) == BaseTypeId . CMIS_DOCUMENT ) { return new String [ ] { JcrConstants . NT_FILE } ; } return new String [ ] { cmisType . getParentType ( ) . getId ( ) } ;
public class EmailAutoCompleteTextView { /** * / * Implemented Methods */ @ Override public void onFocusChange ( View v , boolean hasFocus ) { } }
final EmailAutoCompleteTextView editText = ( EmailAutoCompleteTextView ) v ; if ( mClearButtonEnabled ) { editText . setClearVisible ( ( hasFocus && ! TextUtils . isEmpty ( editText . getText ( ) . toString ( ) ) ) ) ; } if ( mOnFocusChangeListener != null ) { mOnFocusChangeListener . onFocusChange ( v , hasFocus ) ; }
public class BoxSearch {

    /**
     * Searches all descendant folders using a given query and query parameters.
     *
     * @param offset is the starting position.
     * @param limit the maximum number of items to return. The default is 30 and the maximum is 200.
     * @param bsp containing query and advanced search capabilities.
     * @return a PartialCollection containing the search results.
     */
    public PartialCollection<BoxItem.Info> searchRange(long offset, long limit, final BoxSearchParameters bsp) {
        // Build the query string from the search parameters plus paging arguments.
        QueryStringBuilder builder = bsp.getQueryParameters()
                .appendParam("limit", limit)
                .appendParam("offset", offset);
        URL url = SEARCH_URL_TEMPLATE.buildWithQuery(this.getAPI().getBaseURL(), builder.toString());
        BoxAPIRequest request = new BoxAPIRequest(this.getAPI(), url, "GET");
        BoxJSONResponse response = (BoxJSONResponse) request.send();
        JsonObject responseJSON = JsonObject.readFrom(response.getJSON());
        String totalCountString = responseJSON.get("total_count").toString();
        // NOTE(review): total_count is parsed through Double before narrowing to long —
        // presumably to tolerate non-integer renderings; loses precision above 2^53. Confirm.
        long fullSize = Double.valueOf(totalCountString).longValue();
        PartialCollection<BoxItem.Info> results = new PartialCollection<BoxItem.Info>(offset, limit, fullSize);
        JsonArray jsonArray = responseJSON.get("entries").asArray();
        for (JsonValue value : jsonArray) {
            JsonObject jsonObject = value.asObject();
            // Entries that fail to parse into a known item type are skipped.
            BoxItem.Info parsedItemInfo = (BoxItem.Info) BoxResource.parseInfo(this.getAPI(), jsonObject);
            if (parsedItemInfo != null) {
                results.add(parsedItemInfo);
            }
        }
        return results;
    }
}
public class CXFEndpointProvider { /** * Creates an endpoint reference from a given adress . * @ param address * @ param props * @ return */ private static EndpointReferenceType createEPR ( String address , SLProperties props ) { } }
EndpointReferenceType epr = WSAEndpointReferenceUtils . getEndpointReference ( address ) ; if ( props != null ) { addProperties ( epr , props ) ; } return epr ;
public class ObjectFactory {

    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link FeaturePropertyType}{@code >}.
     *
     * @param value
     *        Java instance representing xml element's value.
     * @return
     *        the new instance of {@link JAXBElement}{@code <}{@link FeaturePropertyType}{@code >}
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/gml", name = "featureMember")
    public JAXBElement<FeaturePropertyType> createFeatureMember(FeaturePropertyType value) {
        // null scope: the element is declared at global (top-level) scope.
        return new JAXBElement<FeaturePropertyType>(_FeatureMember_QNAME, FeaturePropertyType.class, null, value);
    }
}
public class MaxiCode { /** * { @ inheritDoc } */ @ Override protected void plotSymbol ( ) { } }
// hexagons for ( int row = 0 ; row < 33 ; row ++ ) { for ( int col = 0 ; col < 30 ; col ++ ) { if ( grid [ row ] [ col ] ) { double x = ( 2.46 * col ) + 1.23 ; if ( ( row & 1 ) != 0 ) { x += 1.23 ; } double y = ( 2.135 * row ) + 1.43 ; hexagons . add ( new Hexagon ( x , y ) ) ; } } } // circles double [ ] radii = { 10.85 , 8.97 , 7.10 , 5.22 , 3.31 , 1.43 } ; for ( int i = 0 ; i < radii . length ; i ++ ) { Ellipse2D . Double circle = new Ellipse2D . Double ( ) ; circle . setFrameFromCenter ( 35.76 , 35.60 , 35.76 + radii [ i ] , 35.60 + radii [ i ] ) ; target . add ( circle ) ; }
public class TracingJmsListenerEndpointRegistry {

    /**
     * It would be better to trace by wrapping, but
     * {@link MethodJmsListenerEndpoint#createMessageListenerInstance()} is protected so
     * we can't call it from outside code. In other words, a forwarding pattern can't be
     * used. Instead, we copy state from the input.
     *
     * NOTE: As {@linkplain MethodJmsListenerEndpoint} is neither final, nor effectively
     * final, we can't ensure copying will get all state. For example, a subtype could hold
     * state we aren't aware of, or change behavior. We can consider checking that input is
     * not a subtype, and most conservatively leaving unknown subtypes untraced.
     *
     * @param source jms endpoint
     * @return wrapped endpoint, or {@code source} unchanged when copying is not possible.
     */
    MethodJmsListenerEndpoint trace(MethodJmsListenerEndpoint source) {
        // Skip out rather than incompletely copying the source
        if (this.messageHandlerMethodFactoryField == null || this.embeddedValueResolverField == null) {
            return source;
        }
        // We want the stock implementation, except we want to wrap the message listener
        // in a new span
        MethodJmsListenerEndpoint dest = new MethodJmsListenerEndpoint() {
            @Override
            protected MessagingMessageListenerAdapter createMessageListenerInstance() {
                return new TracingMessagingMessageListenerAdapter(jmsTracing(), currentTraceContext());
            }
        };
        // set state from AbstractJmsListenerEndpoint
        dest.setId(source.getId());
        dest.setDestination(source.getDestination());
        dest.setSubscription(source.getSubscription());
        dest.setSelector(source.getSelector());
        dest.setConcurrency(source.getConcurrency());
        // set state from MethodJmsListenerEndpoint
        dest.setBean(source.getBean());
        dest.setMethod(source.getMethod());
        dest.setMostSpecificMethod(source.getMostSpecificMethod());
        try {
            // These two have no public getters; read them reflectively via cached Fields.
            dest.setMessageHandlerMethodFactory(get(source, this.messageHandlerMethodFactoryField));
            dest.setEmbeddedValueResolver(get(source, this.embeddedValueResolverField));
        } catch (IllegalAccessException e) {
            return source; // skip out rather than incompletely copying the source
        }
        return dest;
    }
}
public class XLifecycleExtension { /** * Assigns a lifecycle transition string to the given event . * @ param event * Event to be tagged . * @ param transition * Lifecycle transition string to be assigned . */ public void assignTransition ( XEvent event , String transition ) { } }
if ( transition != null && transition . trim ( ) . length ( ) > 0 ) { XAttributeLiteral transAttr = ( XAttributeLiteral ) ATTR_TRANSITION . clone ( ) ; transAttr . setValue ( transition . trim ( ) ) ; event . getAttributes ( ) . put ( KEY_TRANSITION , transAttr ) ; }
public class DrizzlePreparedStatement {

    /**
     * Sets the designated parameter to the given <code>Reader</code> object, which is the
     * given number of characters long. When a very large UNICODE value is input to a
     * <code>LONGVARCHAR</code> parameter, it may be more practical to send it via a
     * <code>java.io.Reader</code> object. The data will be read from the stream as needed
     * until end-of-file is reached. The JDBC driver will do any necessary conversion from
     * UNICODE to the database char format.
     *
     * <P><B>Note:</B> This stream object can either be a standard Java stream object or
     * your own subclass that implements the standard interface.
     *
     * @param parameterIndex the first parameter is 1, the second is 2, ...
     * @param reader the <code>java.io.Reader</code> object that contains the Unicode data
     * @param length the number of characters in the stream
     * @throws java.sql.SQLException if parameterIndex does not correspond to a parameter
     *         marker in the SQL statement; if a database access error occurs or this
     *         method is called on a closed <code>PreparedStatement</code>
     * @since 1.6
     */
    public void setCharacterStream(final int parameterIndex, final Reader reader, final long length) throws SQLException {
        if (reader == null) {
            // NOTE(review): a null character stream is mapped to Types.BLOB even though
            // this is character data — Types.CLOB/LONGVARCHAR might be expected; confirm
            // this is a deliberate driver choice.
            setNull(parameterIndex, Types.BLOB);
            return;
        }
        try {
            setParameter(parameterIndex, new ReaderParameter(reader, length));
        } catch (IOException e) {
            // Surface stream failures as SQLExceptions, preserving the cause.
            throw SQLExceptionMapper.getSQLException("Could not read stream: " + e.getMessage(), e);
        }
    }
}
public class BasicEvaluationCtx {

    /**
     * Returns attribute value(s) from the environment section of the request.
     *
     * @param type the type of the attribute value(s) to find
     * @param id the id of the attribute value(s) to find
     * @param issuer the issuer of the attribute value(s) to find or null
     * @return a result containing a bag either empty because no values were found or
     *         containing at least one value, or status associated with an Indeterminate
     *         result
     */
    public EvaluationResult getEnvironmentAttribute(URI type, URI id, URI issuer) {
        // Delegate to the generic lookup, scoped to the environment attribute map.
        return getGenericAttributes(type, id, issuer, environmentMap, null, AttributeDesignator.ENVIRONMENT_TARGET);
    }
}
public class Sherdog {

    /**
     * Gets an event via its sherdog URL.
     *
     * @param sherdogUrl Sherdog URL, can be found in the list of events of an organization
     * @return an Event
     * @throws IOException if connecting to sherdog fails
     * @throws ParseException if the page structure has changed
     * @throws SherdogParserException if anything related to the parser goes wrong
     */
    public Event getEvent(String sherdogUrl) throws IOException, ParseException, SherdogParserException {
        // Parsing is delegated to a fresh EventParser configured with this instance's zone id.
        return new EventParser(zoneId).parse(sherdogUrl);
    }
}
public class Logger {

    /**
     * Logs a warning that may or may not have a negative impact.
     *
     * @param correlationId (optional) transaction id to trace execution through
     *        call chain.
     * @param message a human-readable message to log.
     * @param args arguments to parameterize the message.
     */
    public void warn(String correlationId, String message, Object... args) {
        // No associated exception for a plain warning, hence the null error argument.
        formatAndWrite(LogLevel.Warn, correlationId, null, message, args);
    }
}
public class ControlBean {

    /**
     * Internal method used to lookup a ControlBeanContextFactory. This factory is used to
     * create the ControlBeanContext object for this ControlBean. The factory is discoverable
     * from either the containing ControlBeanContext object or from the environment. If the
     * containing CBC object exposes a contextual service of type
     * {@link ControlBeanContextFactory}, the factory returned from this will be used to
     * create a ControlBeanContext object.
     *
     * @param context the containing context, may be null.
     * @return the ControlBeanContextFactory discovered in the environment or a default one
     *         if no factory is configured.
     */
    private ControlBeanContextFactory lookupControlBeanContextFactory(org.apache.beehive.controls.api.context.ControlBeanContext context) {
        // Prefer a factory exposed as a contextual service by the containing context.
        if (context != null) {
            ControlBeanContextFactory containerFactory = context.getService(ControlBeanContextFactory.class, null);
            if (containerFactory != null) {
                return containerFactory;
            }
        }
        // Otherwise discover a factory class from the environment, falling back to the
        // stock implementation, and instantiate it reflectively.
        try {
            DiscoverClass discoverer = new DiscoverClass();
            Class factoryClass = discoverer.find(ControlBeanContextFactory.class,
                    DefaultControlBeanContextFactory.class.getName());
            return (ControlBeanContextFactory) factoryClass.newInstance();
        } catch (Exception e) {
            throw new ControlException("Exception creating ControlBeanContext", e);
        }
    }
}
public class TransactionCheckpointLogRecord {
    /**
     * Called to perform recovery action during a warm start of the ObjectManager.
     * Re-drives the add/replace/optimistic-replace/delete/notify operations of an
     * in-flight transaction that was captured in a checkpoint, then restores the
     * transaction's state. The statement order mirrors the original transaction
     * and must not be changed.
     *
     * @param objectManagerState of the ObjectManager performing recovery.
     * @throws ObjectManagerException
     */
    public void performRecovery(ObjectManagerState objectManagerState) throws ObjectManagerException {
        if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
            trace.entry(this, cclass, "performRecovery", objectManagerState);
        if (Tracing.isAnyTracingEnabled() && trace.isDebugEnabled())
            trace.debug(this, cclass,
                        "logicalUnitOfWork=" + logicalUnitOfWork + "(LogicalUnitOfWork)"
                        + "\n transactionState=" + transactionState + "(int)"
                        + "\n tokensToAdd=" + tokensToAdd + "(java.util.Collection)"
                        + "\n tokensToReplace=" + tokensToReplace + "(java.util.Collecton)"
                        + "\n serializedBytesToReplace=" + serializedBytesToReplace + "(java.util.Collection)"
                        + "\n tokensToOptimisticReplace=" + tokensToOptimisticReplace + "(java.util.Collection)"
                        + "\n tokensToDelete=" + tokensToDelete + "(java.util.Collection)"
                        + "\n tokensToNotify=" + tokensToNotify + "(java.util.Collection)");

        // In principle we should test to see if objectManagerState.checkpointEndSee is true,
        // because if it is we dont need to do this processing because the transaction is
        // already in its correct state, however there is also no harm in processing this again.

        // We have started an new checkpoint but not completed it.
        // Redo the operations of the ManagedObject using its original Transaction.
        Transaction transactionForRecovery = objectManagerState.getTransaction(logicalUnitOfWork);

        // Recover the Added ManagedObjects.
        for (java.util.Iterator tokenIterator = tokensToAdd.iterator(); tokenIterator.hasNext();) {
            Token token = (Token) tokenIterator.next();
            ManagedObject existingManagedObject = token.getManagedObject();
            // If a subsequent transaction has deleted the ManagedObject we dont need to recover it.
            if (existingManagedObject != null) {
                // Revert to constructed state.
                existingManagedObject.state = ManagedObject.stateConstructed;
                transactionForRecovery.addFromCheckpoint(existingManagedObject);
            } // if (existingManagedObject != null).
        } // for ... tokensToAdd.

        // Recover the Replace ManagedObjects.
        // The two collections are parallel: tokensToReplace and serializedBytesToReplace
        // are iterated in lock-step, one byte[] image per token.
        java.util.Iterator serializedBytesIterator = serializedBytesToReplace.iterator();
        for (java.util.Iterator tokenIterator = tokensToReplace.iterator(); tokenIterator.hasNext();) {
            Token token = (Token) tokenIterator.next();
            ManagedObject existingManagedObject = token.getManagedObject();
            // The serialized image is always consumed, even when the token's object is
            // gone, to keep the two iterators aligned.
            byte[] managedObjectBytes = (byte[]) serializedBytesIterator.next();
            ManagedObject replacementManagedObject = ManagedObject.restoreFromSerializedBytes(managedObjectBytes, objectManagerState);
            // If a subsequent transaction has deleted the ManagedObject we dont need to recover it.
            if (existingManagedObject != null) {
                // Replace what we already have with this version.
                transactionForRecovery.lock(existingManagedObject);
                // Make the underlying object the way it was when we made the original transaction.replace() call.
                token.setManagedObject(replacementManagedObject);
                transactionForRecovery.replaceFromCheckpoint(existingManagedObject, managedObjectBytes);
            } // if (existingManagedObject != null).
        } // for ... tokensToReplace.

        // Recover the Optimistic Replace ManagedObjects.
        for (java.util.Iterator tokenIterator = tokensToOptimisticReplace.iterator(); tokenIterator.hasNext();) {
            Token token = (Token) tokenIterator.next();
            ManagedObject existingManagedObject = token.getManagedObject();
            // If a subsequent transaction has deleted the ManagedObject we dont need to recover it.
            if (existingManagedObject != null) {
                transactionForRecovery.optimisticReplaceFromCheckpoint(existingManagedObject);
            } // if (existingManagedObject != null).
        } // for ... tokensToOptimisticReplace.

        // Recover the Delete ManagedObjects.
        for (java.util.Iterator tokenIterator = tokensToDelete.iterator(); tokenIterator.hasNext();) {
            Token token = (Token) tokenIterator.next();
            ManagedObject existingManagedObject = token.getManagedObject();
            // If the object has already been deleted we need not do anything.
            if (existingManagedObject != null) {
                transactionForRecovery.deleteFromCheckpoint(existingManagedObject);
            } // if (existingManagedObject != null).
        } // for ... tokensToDelete.

        // Recover the Notify ManagedObjects.
        for (java.util.Iterator tokenIterator = tokensToNotify.iterator(); tokenIterator.hasNext();) {
            Token token = (Token) tokenIterator.next();
            ManagedObject existingManagedObject = token.getManagedObject();
            // If the object has already been deleted we need not do anything.
            if (existingManagedObject != null) {
                // Notify registers against the token, not the object.
                transactionForRecovery.notifyFromCheckpoint(token);
            } // if (existingManagedObject != null).
        } // for ... tokensToNotify.

        // Finally restore the transaction to the state it held at checkpoint time.
        transactionForRecovery.internalTransaction.resetState(transactionState);

        if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
            trace.exit(this, cclass, "performRecovery");
    }
}
public class RollbackConfiguration { /** * The triggers to monitor during stack creation or update actions . * By default , AWS CloudFormation saves the rollback triggers specified for a stack and applies them to any * subsequent update operations for the stack , unless you specify otherwise . If you do specify rollback triggers for * this parameter , those triggers replace any list of triggers previously specified for the stack . This means : * < ul > * < li > * To use the rollback triggers previously specified for this stack , if any , don ' t specify this parameter . * < / li > * < li > * To specify new or updated rollback triggers , you must specify < i > all < / i > the triggers that you want used for this * stack , even triggers you ' ve specifed before ( for example , when creating the stack or during a previous stack * update ) . Any triggers that you don ' t include in the updated list of triggers are no longer applied to the stack . * < / li > * < li > * To remove all currently specified triggers , specify an empty list for this parameter . * < / li > * < / ul > * If a specified trigger is missing , the entire stack operation fails and is rolled back . * @ param rollbackTriggers * The triggers to monitor during stack creation or update actions . < / p > * By default , AWS CloudFormation saves the rollback triggers specified for a stack and applies them to any * subsequent update operations for the stack , unless you specify otherwise . If you do specify rollback * triggers for this parameter , those triggers replace any list of triggers previously specified for the * stack . This means : * < ul > * < li > * To use the rollback triggers previously specified for this stack , if any , don ' t specify this parameter . 
* < / li > * < li > * To specify new or updated rollback triggers , you must specify < i > all < / i > the triggers that you want used * for this stack , even triggers you ' ve specifed before ( for example , when creating the stack or during a * previous stack update ) . Any triggers that you don ' t include in the updated list of triggers are no longer * applied to the stack . * < / li > * < li > * To remove all currently specified triggers , specify an empty list for this parameter . * < / li > * < / ul > * If a specified trigger is missing , the entire stack operation fails and is rolled back . */ public void setRollbackTriggers ( java . util . Collection < RollbackTrigger > rollbackTriggers ) { } }
if ( rollbackTriggers == null ) { this . rollbackTriggers = null ; return ; } this . rollbackTriggers = new com . amazonaws . internal . SdkInternalList < RollbackTrigger > ( rollbackTriggers ) ;
public class AdCustomizerFeed { /** * Gets the feedAttributes value for this AdCustomizerFeed . * @ return feedAttributes * The AdCustomizerFeed ' s schema . In SET operations , these attributes * will be considered new * attributes and will be appended to the existing list * of attributes unless this list is an exact * copy of the existing list ( as would be obtained via * { @ link AdCustomizerFeedService # get } ) . * < span class = " constraint Selectable " > This field can * be selected using the value " FeedAttributes " . < / span > * < span class = " constraint ContentsNotNull " > This field * must not contain { @ code null } elements . < / span > * < span class = " constraint Required " > This field is required * and should not be { @ code null } when it is contained within { @ link * Operator } s : ADD , SET . < / span > */ public com . google . api . ads . adwords . axis . v201809 . cm . AdCustomizerFeedAttribute [ ] getFeedAttributes ( ) { } }
return feedAttributes ;
public class LogBuffer { /** * Return 32 - bit signed int from buffer . ( little - endian ) * @ see mysql - 5.1.60 / include / my _ global . h - sint4korr */ public final int getInt32 ( final int pos ) { } }
final int position = origin + pos ; if ( pos + 3 >= limit || pos < 0 ) throw new IllegalArgumentException ( "limit excceed: " + ( pos < 0 ? pos : ( pos + 3 ) ) ) ; byte [ ] buf = buffer ; return ( 0xff & buf [ position ] ) | ( ( 0xff & buf [ position + 1 ] ) << 8 ) | ( ( 0xff & buf [ position + 2 ] ) << 16 ) | ( ( buf [ position + 3 ] ) << 24 ) ;
public class IAPlatformClient { /** * getCurrentUser the user from quickbooks * @ throws ConnectionException */ public User getcurrentUser ( String consumerKey , String consumerSecret , String accessToken , String accessTokenSecret ) throws ConnectionException { } }
httpClient = new PlatformHttpClient ( consumerKey , consumerSecret , accessToken , accessTokenSecret ) ; User user = null ; ; try { user = this . httpClient . getCurrentUser ( ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; } return user ;
public class MultiKeySequentialProcessor {
    /**
     * Queues up a new task to execute, subject to the dependency keys that match the given filter.
     * This task will not begin execution until all previous tasks whose keys match the given filter
     * have finished. In addition, no subsequent task for any key that matches the given filter will
     * begin executing until this task has finished executing.
     *
     * @param keyFilter A Predicate defining the filter that determines which keys this task will depend on.
     * @param toRun A Supplier that will be invoked when it is this task's turn to run. It will return a
     *              CompletableFuture that will complete when this task completes.
     * @param <ReturnType> Return type.
     * @return A CompletableFuture that will complete with the result from the CompletableFuture
     *         returned by toRun, when toRun completes executing.
     */
    public <ReturnType> CompletableFuture<ReturnType> addWithFilter(Predicate<KeyType> keyFilter, Supplier<CompletableFuture<? extends ReturnType>> toRun) {
        CompletableFuture<ReturnType> result = new CompletableFuture<>();
        ArrayList<CompletableFuture<?>> existingTasks = new ArrayList<>();
        synchronized (this.queue) {
            Exceptions.checkNotClosed(this.closed, this);

            // Collect all currently executing tasks for the given keys.
            for (val e : this.queue.entrySet()) {
                if (keyFilter.test(e.getKey())) {
                    existingTasks.add(e.getValue());
                }
            }

            // Chain this task behind the matching predecessors while still holding
            // the lock, so no competing task can slip in between.
            executeAfterIfNeeded(existingTasks, toRun, result);

            // Record the action.
            this.filterQueue.put(keyFilter, result);
        }

        // Immediate execution (no predecessors) happens outside the lock, so user
        // code in toRun never runs while this.queue is held.
        executeNowIfNeeded(existingTasks, toRun, result);

        // Cleanup: if this was the last task in the queue, then clean up the queue.
        result.whenComplete((r, ex) -> cleanupFilter(keyFilter));
        return result;
    }
}
public class AlluxioStatusException { /** * Converts checked throwables to Alluxio status exceptions . Unchecked throwables should not be * passed to this method . Use Throwables . propagateIfPossible before passing a Throwable to this * method . * @ param throwable a throwable * @ return the converted { @ link AlluxioStatusException } */ public static AlluxioStatusException fromCheckedException ( Throwable throwable ) { } }
try { throw throwable ; } catch ( IOException e ) { return fromIOException ( e ) ; } catch ( AlluxioException e ) { return fromAlluxioException ( e ) ; } catch ( InterruptedException e ) { return new CancelledException ( e ) ; } catch ( RuntimeException e ) { throw new IllegalStateException ( "Expected a checked exception but got " + e ) ; } catch ( Exception e ) { return new UnknownException ( e ) ; } catch ( Throwable t ) { throw new IllegalStateException ( "Expected a checked exception but got " + t ) ; }
public class Currency { /** * Returns the localized currency symbol for this currency in { @ code locale } . * That is , given " USD " and Locale . US , you ' d get " $ " , but given " USD " and a non - US locale , * you ' d get " US $ " . * < p > If the locale only specifies a language rather than a language and a country ( such as * { @ code Locale . JAPANESE } or { new Locale ( " en " , " " ) } rather than { @ code Locale . JAPAN } or * { new Locale ( " en " , " US " ) } ) , the ISO 4217 currency code is returned . * < p > If there is no locale - specific currency symbol , the ISO 4217 currency code is returned . */ public String getSymbol ( Locale locale ) { } }
if ( locale == null ) { throw new NullPointerException ( "locale == null" ) ; } // Check the locale first , in case the locale has the same currency . LocaleData localeData = LocaleData . get ( locale ) ; if ( localeData . internationalCurrencySymbol . equals ( currencyCode ) ) { return localeData . currencySymbol ; } // Try ICU , and fall back to the currency code if ICU has nothing . String symbol = ICU . getCurrencySymbol ( locale , currencyCode ) ; return symbol != null ? symbol : currencyCode ;
public class DeterministicHierarchy { /** * Extends the tree by calculating the requested child for the given path . For example , to get the key at position * 1/2/3 you would pass 1/2 as the parent path and 3 as the child number . * @ param parentPath the path to the parent * @ param relative whether the path is relative to the root path * @ param createParent whether the parent corresponding to path should be created ( with any necessary ancestors ) if it doesn ' t exist already * @ return the requested key . * @ throws IllegalArgumentException if the parent doesn ' t exist and createParent is false . */ public DeterministicKey deriveChild ( List < ChildNumber > parentPath , boolean relative , boolean createParent , ChildNumber createChildNumber ) { } }
return deriveChild ( get ( parentPath , relative , createParent ) , createChildNumber ) ;
public class Client { /** * Opens a TCP only client . * @ see # connect ( int , InetAddress , int , int ) */ public void connect ( int timeout , String host , int tcpPort ) throws IOException { } }
connect ( timeout , InetAddress . getByName ( host ) , tcpPort , - 1 ) ;
public class LazyList { /** * Converts the resultset to list of maps , where each map represents a row in the resultset keyed off column names . * @ return list of maps , where each map represents a row in the resultset keyed off column names . */ public List < Map < String , Object > > toMaps ( ) { } }
hydrate ( ) ; List < Map < String , Object > > maps = new ArrayList < > ( delegate . size ( ) ) ; for ( T t : delegate ) { maps . add ( t . toMap ( ) ) ; } return maps ;
public class CSSHandler { /** * Create a { @ link CascadingStyleSheet } object from a parsed object . * @ param eVersion * The CSS version to use . May not be < code > null < / code > . * @ param aNode * The parsed CSS object to read . May not be < code > null < / code > . * @ return Never < code > null < / code > . */ @ Nonnull @ Deprecated public static CascadingStyleSheet readCascadingStyleSheetFromNode ( @ Nonnull final ECSSVersion eVersion , @ Nonnull final CSSNode aNode ) { } }
return readCascadingStyleSheetFromNode ( eVersion , aNode , CSSReader . getDefaultInterpretErrorHandler ( ) ) ;
public class MailSender { /** * 发送邮件 , 调用此方法前请先检查邮件服务器是否已经设置 , 如果没有设置 , 请先设置 { @ link MailSender # setHost ( String ) } , 如不设置将使用默认的QQ邮件服务器 * @ param to 收件箱 * @ param title 标题 * @ param content 内容 * @ param from 发件箱 * @ param key 密码 * @ throws Exception 异常 */ public static void sendMail ( String to , String title , String content , String from , String key ) throws Exception { } }
setFrom ( from ) ; setKey ( key ) ; sendMail ( to , title , content ) ;
public class Parser { /** * This method checks if current position is a legal start position for a * strong or emph sequence by checking the last parsed character ( - sequence ) . */ protected boolean isLegalEmphOrStrongStartPos ( ) { } }
if ( currentIndex ( ) == 0 ) return true ; Object lastItem = peek ( 1 ) ; Class < ? > lastClass = lastItem . getClass ( ) ; SuperNode supernode ; while ( SuperNode . class . isAssignableFrom ( lastClass ) ) { supernode = ( SuperNode ) lastItem ; if ( supernode . getChildren ( ) . size ( ) < 1 ) return true ; lastItem = supernode . getChildren ( ) . get ( supernode . getChildren ( ) . size ( ) - 1 ) ; lastClass = lastItem . getClass ( ) ; } return ( TextNode . class . equals ( lastClass ) && ( ( TextNode ) lastItem ) . getText ( ) . endsWith ( " " ) ) || ( SimpleNode . class . equals ( lastClass ) ) || ( java . lang . Integer . class . equals ( lastClass ) ) ;
public class TimeZoneFormat {
    /**
     * Break input String into String[]. Each array element represents a code point. This method
     * is used for parsing localized digit characters and supports characters in Unicode
     * supplemental planes.
     *
     * @param str the string
     * @return the array of code points in String[]
     */
    private static String[] toCodePoints(String str) {
        final int count = str.codePointCount(0, str.length());
        String[] result = new String[count];
        int offset = 0;
        for (int i = 0; i < count; i++) {
            // Advance by exactly one code point (1 or 2 chars for supplementary planes).
            final int next = str.offsetByCodePoints(offset, 1);
            result[i] = str.substring(offset, next);
            offset = next;
        }
        return result;
    }
}
public class RSA { /** * 加密 * @ param key * @ param plainBytes * @ return */ public static byte [ ] encrypt ( PublicKey key , byte [ ] plainBytes ) { } }
ByteArrayOutputStream out = null ; try { Cipher cipher = Cipher . getInstance ( CIPHER_ALGORITHM ) ; cipher . init ( Cipher . ENCRYPT_MODE , key ) ; int inputLen = plainBytes . length ; if ( inputLen <= MAX_ENCRYPT_BLOCK ) { return cipher . doFinal ( plainBytes ) ; } out = new ByteArrayOutputStream ( ) ; int offSet = 0 ; byte [ ] cache ; int i = 0 ; // 对数据分段加密 while ( inputLen - offSet > 0 ) { if ( inputLen - offSet > MAX_ENCRYPT_BLOCK ) { cache = cipher . doFinal ( plainBytes , offSet , MAX_ENCRYPT_BLOCK ) ; } else { cache = cipher . doFinal ( plainBytes , offSet , inputLen - offSet ) ; } out . write ( cache , 0 , cache . length ) ; i ++ ; offSet = i * MAX_ENCRYPT_BLOCK ; } return out . toByteArray ( ) ; } catch ( NoSuchAlgorithmException e ) { throw new RuntimeException ( "无此解密算法" ) ; } catch ( NoSuchPaddingException e ) { e . printStackTrace ( ) ; return null ; } catch ( InvalidKeyException e ) { throw new RuntimeException ( "解密私钥非法,请检查" ) ; } catch ( IllegalBlockSizeException e ) { throw new RuntimeException ( "密文长度非法" ) ; } catch ( BadPaddingException e ) { throw new RuntimeException ( "密文数据已损坏" ) ; } finally { try { if ( out != null ) out . close ( ) ; } catch ( Exception e2 ) { } }
public class MultiProcessCluster { /** * Starts the specified master . * @ param i the index of the master to start */ public synchronized void startMaster ( int i ) throws IOException { } }
Preconditions . checkState ( mState == State . STARTED , "Must be in a started state to start masters" ) ; mMasters . get ( i ) . start ( ) ;
public class CmsProperty { /** * Calls < code > { @ link # setAutoCreatePropertyDefinition ( boolean ) } < / code > for each * property object in the given List with the given < code > value < / code > parameter . < p > * This method will modify the objects in the input list directly . < p > * @ param list a list of { @ link CmsProperty } objects to modify * @ param value boolean value * @ return the modified list of { @ link CmsProperty } objects * @ see # setAutoCreatePropertyDefinition ( boolean ) */ public static final List < CmsProperty > setAutoCreatePropertyDefinitions ( List < CmsProperty > list , boolean value ) { } }
CmsProperty property ; // choose the fastest method to traverse the list if ( list instanceof RandomAccess ) { for ( int i = 0 , n = list . size ( ) ; i < n ; i ++ ) { property = list . get ( i ) ; property . m_autoCreatePropertyDefinition = value ; } } else { Iterator < CmsProperty > i = list . iterator ( ) ; while ( i . hasNext ( ) ) { property = i . next ( ) ; property . m_autoCreatePropertyDefinition = value ; } } return list ;
public class RunList { /** * Reduce the size of the list by only leaving relatively new ones . * This also removes on - going builds , as RSS cannot be used to publish information * if it changes . * < em > Warning : < / em > this method mutates the original list and then returns it . */ public RunList < R > newBuilds ( ) { } }
GregorianCalendar cal = new GregorianCalendar ( ) ; cal . add ( Calendar . DAY_OF_YEAR , - 7 ) ; final long t = cal . getTimeInMillis ( ) ; // can ' t publish on - going builds return filter ( new Predicate < R > ( ) { public boolean apply ( R r ) { return ! r . isBuilding ( ) ; } } ) // put at least 10 builds , but otherwise ignore old builds . limit ( new CountingPredicate < R > ( ) { public boolean apply ( int index , R r ) { return index < 10 || r . getTimeInMillis ( ) >= t ; } } ) ;
public class ChangeLogAdapter { /** * Set the changelog for this adapter * @ param log */ public void setChangeLog ( ChangeLog log ) { } }
mChangeLog = log ; // Clear out any existing entries clear ( ) ; // sort all the changes Collections . sort ( mChangeLog . versions , new VersionComparator ( ) ) ; // Iterate and add all the ' Change ' objects in the adapter for ( Version version : mChangeLog . versions ) { addAll ( version . changes ) ; } // Notify content has changed notifyDataSetChanged ( ) ;
public class MtasSpanPositionSpans {
    /**
     * Advances to the next document whose indexed position range intersects the
     * configured [start, end] window, resetting the per-document position cursor.
     * (non-Javadoc)
     *
     * @see org.apache.lucene.search.DocIdSetIterator#nextDoc()
     * @return the next matching doc id, or NO_MORE_DOCS when exhausted
     */
    @Override
    public int nextDoc() throws IOException {
        do {
            IndexDoc indexDoc = mtasCodecInfo.getNextDoc(field, docId);
            if (indexDoc != null) {
                docId = indexDoc.docId;
                // Clip the document's position range to the requested window.
                minPosition = Math.max(start, indexDoc.minPosition);
                maxPosition = Math.min(end, indexDoc.maxPosition);
                // Reset the position cursor for the new document.
                currentStartPosition = -1;
                currentEndPosition = -1;
            } else {
                // No further documents: park every cursor at its terminal value.
                docId = NO_MORE_DOCS;
                minPosition = NO_MORE_POSITIONS;
                maxPosition = NO_MORE_POSITIONS;
                currentStartPosition = NO_MORE_POSITIONS;
                currentEndPosition = NO_MORE_POSITIONS;
            }
            // An empty clipped range (min > max) means no positions fall inside the
            // window for this document, so keep scanning.
        } while (docId != NO_MORE_DOCS && (minPosition > maxPosition));
        return docId;
    }
}
public class DeferredLintHandler { /** * Sets the current position to the provided { @ code currentPos } . { @ link LintLogger } s * passed to subsequent invocations of { @ link # report ( LintLogger ) } will be associated * with the given position . */ public DiagnosticPosition setPos ( DiagnosticPosition currentPos ) { } }
DiagnosticPosition prevPosition = this . currentPos ; this . currentPos = currentPos ; return prevPosition ;
public class Write { /** * Indicates that this write completed successfully . This will set the final result on the externalCompletion future . */ Timer complete ( ) { } }
Preconditions . checkState ( this . entryId . get ( ) >= 0 , "entryId not set; cannot complete Write." ) ; this . failureCause . set ( null ) ; this . result . complete ( new LedgerAddress ( this . writeLedger . get ( ) . metadata , this . entryId . get ( ) ) ) ; return endAttempt ( ) ;
public class AppMsg { /** * Make a { @ link AppMsg } with a custom view . It can be used to create non - floating notifications if floating is false . * @ param context The context to use . Usually your * { @ link android . app . Activity } object . * @ param view * View to be used . * @ param text The text to show . Can be formatted text . * @ param style The style with a background and a duration . * @ param floating true if it ' ll float . */ private static AppMsg makeText ( Activity context , CharSequence text , Style style , View view , boolean floating ) { } }
return makeText ( context , text , style , view , floating , 0 ) ;
public class MtasDataItemLongFull {
    /**
     * Builds a histogram of the collected long values. The argument string may supply
     * "start", "end", "step" and "number" settings; missing bounds are derived from the
     * observed min/max, and exactly one of step/number is honoured (the other is computed).
     * (non-Javadoc)
     *
     * @see mtas.codec.util.DataCollector.MtasDataItemFull#getDistribution(java.lang.String)
     * @return an ordered map from bucket label (e.g. "[l,r]") to count, or {@code null}
     *         when the requested range does not intersect the data
     */
    @Override
    protected HashMap<String, Object> getDistribution(String argument) {
        HashMap<String, Object> result = new LinkedHashMap<>();
        Long start = null;
        Long end = null;
        Long step = null;
        Integer number = null;
        if (argument != null) {
            Matcher m = fpArgument.matcher(argument);
            // get settings
            while (m.find()) {
                if (m.group(1).trim().equals("start")) {
                    start = Long.parseLong(m.group(2));
                } else if (m.group(1).trim().equals("end")) {
                    end = Long.parseLong(m.group(2));
                } else if (m.group(1).trim().equals("step")) {
                    step = Long.parseLong(m.group(2));
                } else if (m.group(1).trim().equals("number")) {
                    number = Integer.parseInt(m.group(2));
                }
            }
        }
        // always exactly one of (positive) number and (positive) step, other null
        if ((number == null || number < 1) && (step == null || step < 1)) {
            number = 10;
            step = null;
        } else if (step != null && step < 1) {
            step = null;
        } else if (number != null && number < 1) {
            number = null;
        } else if (step != null) {
            number = null;
        }
        // sanity checks start/end
        createStats();
        // Observed value range, floor/ceil'd to whole longs.
        long tmpStart = Double.valueOf(Math.floor(stats.getMin())).longValue();
        long tmpEnd = Double.valueOf(Math.ceil(stats.getMax())).longValue();
        // Reject requested ranges that cannot intersect the data.
        if (start != null && end != null && start > end) {
            return null;
        } else if (start != null && start > tmpEnd) {
            return null;
        } else if (end != null && end < tmpStart) {
            return null;
        }
        // check start and end
        // Fill in whichever of start/end is missing from the observed range; the
        // negated floorDiv on a negated span computes a ceiling division, so the
        // buckets always cover the full range.
        if (start == null && end == null) {
            if (step == null) {
                step = -Math.floorDiv((tmpStart - tmpEnd - 1), number);
            }
            number = Long.valueOf(-Math.floorDiv((tmpStart - tmpEnd - 1), step)).intValue();
            start = tmpStart;
            end = start + (number * step);
        } else if (start == null) {
            if (step == null) {
                step = -Math.floorDiv((tmpStart - end - 1), number);
            }
            number = Long.valueOf(-Math.floorDiv((tmpStart - end - 1), step)).intValue();
            start = end - (number * step);
        } else if (end == null) {
            if (step == null) {
                step = -Math.floorDiv((start - tmpEnd - 1), number);
            }
            number = Long.valueOf(-Math.floorDiv((start - tmpEnd - 1), step)).intValue();
            end = start + (number * step);
        } else {
            if (step == null) {
                step = -Math.floorDiv((start - end - 1), number);
            }
            number = Long.valueOf(-Math.floorDiv((start - end - 1), step)).intValue();
        }
        // Count values into their buckets.
        long[] list = new long[number];
        for (Long v : fullValues) {
            if (v >= start && v <= end) {
                int i = Long.valueOf(Math.floorDiv((v - start), step)).intValue();
                list[i]++;
            }
        }
        // Emit "[l,r]" labels (or "[l]" for single-value buckets), insertion-ordered.
        for (int i = 0; i < number; i++) {
            Long l = start + i * step;
            Long r = Math.min(end, l + step - 1);
            String key;
            if (step > 1 && r > l) {
                key = "[" + l + "," + r + "]";
            } else {
                key = "[" + l + "]";
            }
            result.put(key, list[i]);
        }
        return result;
    }
}
public class ClassWriter {
    /**
     * Write field symbol, entering all references into constant pool.
     * Emits the classfile field_info structure: access_flags, name_index,
     * descriptor_index, then the attribute table (ConstantValue plus member
     * attributes). The write order must match the classfile format.
     */
    void writeField(VarSymbol v) {
        // access_flags
        int flags = adjustFlags(v.flags());
        databuf.appendChar(flags);
        if (dumpFieldModifiers) {
            // Debug aid: dump the field name and its flag names to the error writer.
            PrintWriter pw = log.getWriter(Log.WriterKind.ERROR);
            pw.println("FIELD  " + v.name);
            pw.println("---" + flagNames(v.flags()));
        }
        // name_index and descriptor_index (type signature of the erased type).
        databuf.appendChar(pool.put(v.name));
        databuf.appendChar(pool.put(typeSig(v.erasure(types))));
        // Reserve space for the attribute count, filled in by endAttrs below.
        int acountIdx = beginAttrs();
        int acount = 0;
        if (v.getConstValue() != null) {
            // Compile-time constant: emit a ConstantValue attribute.
            int alenIdx = writeAttr(names.ConstantValue);
            databuf.appendChar(pool.put(v.getConstValue()));
            endAttr(alenIdx);
            acount++;
        }
        // Shared member attributes (annotations, Signature, Deprecated, ...).
        acount += writeMemberAttrs(v);
        endAttrs(acountIdx, acount);
    }
}
public class FnBigDecimal { /** * Determines whether the target object and the specified object are equal * by calling the < tt > equals < / tt > method on the target object . * @ param object the { @ link BigDecimal } to compare to the target * @ return true if both objects are equal , false if not . */ public static final Function < BigDecimal , Boolean > eq ( final BigDecimal object ) { } }
return ( Function < BigDecimal , Boolean > ) ( ( Function ) FnObject . eq ( object ) ) ;
public class TargetProtocolItemStream { /** * Rebuild Guaranteed Delivery Target Streams * Feature 171905.99 * @ throws MessageStoreException * @ throws SIResourceException */ private void reconstituteTargetStreams ( ProducerInputHandler inputHandler ) throws MessageStoreException , SIException , SIResourceException { } }
if ( tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "reconstituteTargetStreams" ) ; /* * Iterate through all contained Protocol Items , rebuilding * associated target streams for each . */ NonLockingCursor cursor = null ; try { cursor = newNonLockingItemCursor ( new ClassEqualsFilter ( StreamSet . class ) ) ; AbstractItem item = null ; while ( null != ( item = cursor . next ( ) ) ) { StreamSet streamSet = ( StreamSet ) item ; inputHandler . reconstituteTargetStreams ( streamSet ) ; } } finally { if ( cursor != null ) cursor . finished ( ) ; } if ( tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "reconstituteTargetStreams" ) ;
public class PeriodUtil { /** * Calculates the next period end . The calculation is in local time . * @ param now the current time in GMT ms since the epoch * @ return the time of the next period in GMT ms since the epoch */ private static long periodEnd ( long now , long period , LocalDateTime cal ) { } }
if ( period < 0 ) return Long . MAX_VALUE ; else if ( period == 0 ) return now ; if ( period < 30 * DAY ) { // cal . setGMTTime ( now ) ; long localTime = cal . toEpochSecond ( ZoneOffset . UTC ) * 1000 ; localTime = localTime + ( period - ( localTime + 4 * DAY ) % period ) ; // cal . setLocalTime ( localTime ) ; // return cal . getGMTTime ( ) ; return cal . toEpochSecond ( ZoneOffset . UTC ) * 1000 ; } if ( period % ( 30 * DAY ) == 0 ) { int months = ( int ) ( period / ( 30 * DAY ) ) ; // cal . setGMTTime ( now ) ; long year = cal . getYear ( ) ; int month = cal . getMonthValue ( ) ; // cal . setLocalTime ( 0 ) ; // cal . setDate ( year , month + months , 1 ) ; cal . withMonth ( month + months ) ; cal . withDayOfMonth ( 1 ) ; return cal . toEpochSecond ( ZoneOffset . UTC ) * 1000 ; } if ( period % ( 365 * DAY ) == 0 ) { long years = ( period / ( 365 * DAY ) ) ; // cal . setGMTTime ( now ) ; long year = cal . getYear ( ) ; // cal . setLocalTime ( 0 ) ; long newYear = year + ( years - year % years ) ; cal . withYear ( ( int ) newYear ) ; cal . withMonth ( 1 ) ; cal . withDayOfMonth ( 1 ) ; return cal . toEpochSecond ( ZoneOffset . UTC ) * 1000 ; } // cal . setGMTTime ( now ) ; // long localTime = cal . getLocalTime ( ) ; // localTime = localTime + ( period - ( localTime + 4 * DAY ) % period ) ; // cal . setLocalTime ( localTime ) ; // return cal . getGMTTime ( ) ; return cal . toEpochSecond ( ZoneOffset . UTC ) * 1000 ;
public class TechnologyTargeting {
    /**
     * Sets the deviceCategoryTargeting value for this TechnologyTargeting.
     *
     * @param deviceCategoryTargeting the device categories being targeted by the
     *        {@link LineItem}; may be null to clear the targeting
     */
    public void setDeviceCategoryTargeting(com.google.api.ads.admanager.axis.v201902.DeviceCategoryTargeting deviceCategoryTargeting) {
        // Plain field assignment; no validation or defensive copy is performed.
        this.deviceCategoryTargeting = deviceCategoryTargeting;
    }
}
public class OCSPVerifier {
    /**
     * Gets the revocation status (Good, Revoked or Unknown) of the given peer certificate.
     *
     * <p>Checks the local response cache first; on a miss, builds an OCSP request and
     * tries each responder URL from the certificate's AIA extension in order until
     * one returns a usable single response.
     *
     * @param peerCert the certificate that needs to be validated
     * @param issuerCert the issuer certificate, needed to create the OCSP request
     * @return revocation status of the peer certificate
     * @throws CertificateVerificationException when no responder yields a usable status
     */
    public RevocationStatus checkRevocationStatus(X509Certificate peerCert, X509Certificate issuerCert)
            throws CertificateVerificationException {
        // check cache. Check inside the cache, before calling CA.
        if (cache != null) {
            SingleResp resp = cache.getCacheValue(peerCert.getSerialNumber());
            if (resp != null) {
                // If cant be casted, we have used the wrong cache.
                RevocationStatus status = getRevocationStatus(resp);
                if (LOG.isInfoEnabled()) {
                    LOG.info("OCSP response taken from cache.");
                }
                return status;
            }
        }
        OCSPReq request = generateOCSPRequest(issuerCert, peerCert.getSerialNumber());
        // AIA locations: OCSP responder URLs embedded in the peer certificate.
        List<String> locations = getAIALocations(peerCert);
        OCSPResp ocspResponse = null;
        for (String serviceUrl : locations) {
            SingleResp[] responses;
            try {
                ocspResponse = getOCSPResponce(serviceUrl, request);
                if (OCSPResponseStatus.SUCCESSFUL != ocspResponse.getStatus()) {
                    continue; // Server didn't give the correct response.
                }
                BasicOCSPResp basicResponse = (BasicOCSPResp) ocspResponse.getResponseObject();
                responses = (basicResponse == null) ? null : basicResponse.getResponses();
            } catch (Exception e) {
                // NOTE(review): any failure for this responder is silently skipped and the
                // next AIA location is tried; consider logging `e` for diagnosability.
                continue;
            }
            // Exactly one SingleResp is expected because the request covers one serial.
            if (responses != null && responses.length == 1) {
                SingleResp resp = responses[0];
                RevocationStatus status = getRevocationStatus(resp);
                if (cache != null) {
                    cache.setCacheValue(ocspResponse, peerCert.getSerialNumber(), resp, request, serviceUrl);
                }
                return status;
            }
        }
        throw new CertificateVerificationException("Could not get revocation status from OCSP.");
    }
}
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getIfcPressureMeasure ( ) { } }
if ( ifcPressureMeasureEClass == null ) { ifcPressureMeasureEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 726 ) ; } return ifcPressureMeasureEClass ;
public class XDMClientChildSbb {
    /**
     * (non-Javadoc)
     * @see org.restcomm.slee.enabler.xdmc.XDMClientControl#put(java.net.URI,
     *      java.lang.String, byte[], Credentials)
     */
    public void put(URI uri, String mimetype, byte[] content, Credentials credentials) throws IOException {
        // Delegates to the overload that also takes an ETag, passing null (no conditional update).
        put(uri, mimetype, content, null, credentials);
    }
}
public class AutoBytePool { /** * 重设大小 , 并且在末尾加一个值 * @ param size 大小 * @ param value 值 */ void resize ( int size , byte value ) { } }
if ( size > _capacity ) { resizeBuf ( size ) ; } while ( _size < size ) { _buf [ _size ++ ] = value ; }
public class CobolPrimitiveType {
    /**
     * Convert mainframe data into a Java object.
     *
     * @param cobolContext host COBOL configuration parameters
     * @param hostData the byte array containing mainframe data
     * @param start the start position for the expected type in the byte array
     * @return the mainframe value as a java object
     */
    public FromHostPrimitiveResult<T> fromHost(CobolContext cobolContext, byte[] hostData, int start) {
        // Delegates to the overload that also takes the target Java class,
        // using this type's configured javaClass.
        return fromHost(javaClass, cobolContext, hostData, start);
    }
}
public class DeepLinkUtil { /** * Returns current remote connection link , that * contains connection host and port . ( Example : ` localhost : 9401 ` ) . * Returns null , of current connection is local * @ return current remote connection link or null , if current connection is local */ public static String getCurrentRemoteConnectionLink ( ) { } }
if ( ! APILookupUtility . isLocal ( ) ) try { return URLEncoder . encode ( APILookupUtility . getCurrentRemoteInstance ( ) . getHost ( ) , "UTF-8" ) + COLON_URL_ENCODED + APILookupUtility . getCurrentRemoteInstance ( ) . getPort ( ) ; } catch ( IllegalStateException | UnsupportedEncodingException ignored ) { /* null be returned */ } return null ;
public class CommerceShippingMethodPersistenceImpl { /** * Returns the first commerce shipping method in the ordered set where groupId = & # 63 ; and active = & # 63 ; . * @ param groupId the group ID * @ param active the active * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the first matching commerce shipping method * @ throws NoSuchShippingMethodException if a matching commerce shipping method could not be found */ @ Override public CommerceShippingMethod findByG_A_First ( long groupId , boolean active , OrderByComparator < CommerceShippingMethod > orderByComparator ) throws NoSuchShippingMethodException { } }
CommerceShippingMethod commerceShippingMethod = fetchByG_A_First ( groupId , active , orderByComparator ) ; if ( commerceShippingMethod != null ) { return commerceShippingMethod ; } StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "groupId=" ) ; msg . append ( groupId ) ; msg . append ( ", active=" ) ; msg . append ( active ) ; msg . append ( "}" ) ; throw new NoSuchShippingMethodException ( msg . toString ( ) ) ;
public class SSHLauncher { /** * Method reads an input stream into a byte array and closes the input stream when finished . * Added for reading the remoting jar and generating a hash value for it . * @ param inputStream * @ return * @ throws IOException */ static byte [ ] readInputStreamIntoByteArrayAndClose ( InputStream inputStream ) throws IOException { } }
byte [ ] bytes = null ; try { bytes = ByteStreams . toByteArray ( inputStream ) ; } catch ( IOException e ) { throw e ; } finally { IOUtils . closeQuietly ( inputStream ) ; if ( bytes == null ) { bytes = new byte [ 1 ] ; } } return bytes ;
public class EnumLiteralDeclarationImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public void eUnset ( int featureID ) { } }
switch ( featureID ) { case XtextPackage . ENUM_LITERAL_DECLARATION__ENUM_LITERAL : setEnumLiteral ( ( EEnumLiteral ) null ) ; return ; case XtextPackage . ENUM_LITERAL_DECLARATION__LITERAL : setLiteral ( ( Keyword ) null ) ; return ; } super . eUnset ( featureID ) ;
public class ProgramControl {
    /**
     * AddMasterListeners Method.
     *
     * <p>Disables the last-package-update field and installs a FileListener that,
     * on each new record, seeds BASE_DIRECTORY from the current working directory
     * when the user launched from inside their home directory and has not
     * modified the field.
     */
    public void addMasterListeners() {
        super.addMasterListeners();
        // LAST_PACKAGE_UPDATE is maintained programmatically; never user-editable.
        this.getField(ProgramControl.LAST_PACKAGE_UPDATE).setEnabled(false);
        this.addListener(new FileListener() {
            public void doNewRecord(boolean bDisplayOption) {
                super.doNewRecord(bDisplayOption);
                PropertiesStringField field =
                    (PropertiesStringField) this.getOwner().getField(ProgramControl.BASE_DIRECTORY);
                // Temporarily suppress conversion while programmatically seeding the value.
                field.enableConversion = false;
                // Only seed when the user hasn't touched the field and it still holds its default.
                if (!field.isModified())
                    if (field.getDefault().equals(field.getData())) {
                        try {
                            String home = System.getProperty("user.home");
                            String current = System.getProperty("user.dir");
                            if (!home.equals(current))
                                if (current.startsWith(home)) {
                                    // Set the base directory to the current directories' home
                                    current = Utility.addToPath((String) field.getDefault(),
                                        current.substring(home.length()));
                                    // Strip the last path component (try the platform separator
                                    // first, then '/').
                                    int lastPathSeparator = current.lastIndexOf(File.separator);
                                    if (lastPathSeparator == -1)
                                        lastPathSeparator = current.lastIndexOf('/');
                                    if (lastPathSeparator != -1) {
                                        // Re-arm the init-once handler so the programmatic set
                                        // is accepted as the "first" initialization.
                                        ((InitOnceFieldHandler) field.getListener(InitOnceFieldHandler.class))
                                            .setFirstTime(true);
                                        field.setString(current.substring(0, lastPathSeparator),
                                            DBConstants.DISPLAY, DBConstants.INIT_MOVE);
                                    }
                                }
                        } catch (SecurityException e) {
                            // Ignore (default is fine)
                        }
                    }
                field.enableConversion = true;
            }
        });
    }
}
public class Bits {
    /**
     * Divide bits: produces the binary expansion of {@code numerator/denominator}
     * by recursive long division, emitting one bit per step, up to
     * {@code maxBits} bits.
     *
     * <p>NOTE(review): {@code numerator * 2} can overflow for
     * numerator &gt; Long.MAX_VALUE / 2 — confirm callers bound the inputs.
     *
     * @param numerator the numerator
     * @param denominator the denominator
     * @param maxBits the max bits to emit
     * @return the bits
     */
    public static Bits divide(long numerator, long denominator, long maxBits) {
        // No bits left to emit.
        if (maxBits <= 0) return NULL;
        // Remainder is zero: division is exact from here on (also covers 0/0).
        if (numerator == 0) return ZERO;
        // Remaining fraction is exactly 1: terminate with a single 1 bit.
        if (numerator == denominator) return ONE;
        if (numerator < denominator) {
            // Next bit is 0; shift the remainder left one binary place.
            return ZERO.concatenate(divide(numerator * 2, denominator, maxBits - 1));
        } else {
            // Next bit is 1; subtract the denominator and shift.
            return ONE.concatenate(divide(2 * (numerator - denominator), denominator, maxBits - 1));
        }
    }
}
public class RecyclerViewAdapterWrapper {
    /**
     * Creates and returns a runnable, which allows to change the selection state of a specific
     * {@link Checkable}.
     *
     * @param checkable the checkable, whose selection state should be changed; may not be null
     * @param checked   true, if the checkable should be selected, false otherwise
     * @return the runnable, which has been created; never null
     */
    @NonNull
    private Runnable createCheckableRunnable(@NonNull final Checkable checkable, final boolean checked) {
        // Deferred execution: the state change happens when the runnable is run
        // (e.g. posted to a view's message queue), not when it is created.
        return new Runnable() {

            @Override
            public void run() {
                checkable.setChecked(checked);
            }

        };
    }
}
public class CertificateLoader {
    /**
     * Builds a key manager factory from the given keystore stream.
     *
     * <p>Uses the security provider named in {@code storeProperties} when one is
     * configured; otherwise falls back to the first matching provider.
     *
     * @param keyStoreStream  keystore input stream
     * @param storeProperties store type, provider, password and manager type
     * @return initialized key manager factory
     * @throws IOException              if the keystore cannot be read
     * @throws GeneralSecurityException if the keystore or factory cannot be initialized
     */
    private static KeyManagerFactory getKeyManagerFactory(InputStream keyStoreStream,
            StoreProperties storeProperties) throws IOException, GeneralSecurityException {
        String provider = storeProperties.getProvider();
        KeyStore keyStore;
        if (StringUtils.isNotBlank(provider)) {
            keyStore = KeyStore.getInstance(storeProperties.getType(), provider);
        } else {
            keyStore = KeyStore.getInstance(storeProperties.getType());
        }
        keyStore.load(keyStoreStream, storeProperties.getPassword().toCharArray());
        KeyManagerFactory factory = KeyManagerFactory.getInstance(storeProperties.getManagerType());
        factory.init(keyStore, storeProperties.getPassword().toCharArray());
        return factory;
    }
}
public class BeaconEvent {
    /**
     * Return the event data parsed into the given type.
     *
     * @param type type token describing the target type (supports generics)
     * @param <R>  type to parse the data into
     * @return the event data deserialized from JSON via the shared Gson instance
     */
    public <R> R getData(TypeToken<R> type) {
        // TypeToken preserves generic type information that a plain Class cannot.
        return GSON.fromJson(data, type.getType());
    }
}
public class SimpleDataArray {
    /**
     * Initialize this SimpleDataArray after it is instantiated: acquires the
     * first working segment and opens its segment index buffer (SIB).
     *
     * @throws SegmentException if the segment manager fails with an IOException
     */
    protected void init() {
        try {
            // Initialize the current working segment
            _segment = _segmentManager.nextSegment();
            // Segment index buffer is enabled by default!
            _sib = _segmentManager.openSegmentIndexBuffer(_segment.getSegmentId());
            // NOTE(review): marking the SIB dirty when indexing is DISABLED looks
            // inverted at first glance — presumably it forces a rebuild later
            // instead of trusting a stale buffer; confirm against SIB semantics.
            if (!_sibEnabled) _sib.markAsDirty();
            _log.info("Segment " + _segment.getSegmentId() + " online: " + _segment.getStatus());
        } catch (IOException ioe) {
            // Wrap in SegmentException; the original cause is preserved in the log only.
            _log.error(ioe.getMessage(), ioe);
            throw new SegmentException("Instantiation failed due to " + ioe.getMessage());
        }
    }
}
public class CryptoHelper {
    /**
     * Verify signature.
     *
     * @param algorithm algorithm name
     * @param secretBytes algorithm secret
     * @param contentBytes the content to which the signature applies
     * @param signatureBytes JWT signature
     * @return true if signature is valid
     * @throws NoSuchAlgorithmException if the algorithm is not supported
     * @throws InvalidKeyException if the given key is inappropriate for initializing the specified algorithm
     * @deprecated rather use corresponding method which takes header and payload as separate inputs
     */
    @Deprecated
    boolean verifySignatureFor(String algorithm, byte[] secretBytes, byte[] contentBytes,
            byte[] signatureBytes) throws NoSuchAlgorithmException, InvalidKeyException {
        // MessageDigest.isEqual gives a time-constant comparison, resisting timing attacks.
        return MessageDigest.isEqual(createSignatureFor(algorithm, secretBytes, contentBytes), signatureBytes);
    }
}
public class MusixMatch { /** * Returns the track response which was returned through the query . * @ param methodName * the name of the API method . * @ param params * a map which contains the key - value pair * @ return the track details . * @ throws MusixMatchException * if any error occurs . */ private Track getTrackResponse ( String methodName , Map < String , Object > params ) throws MusixMatchException { } }
Track track = new Track ( ) ; String response = null ; TrackGetMessage message = null ; response = MusixMatchRequest . sendRequest ( Helper . getURLString ( methodName , params ) ) ; Gson gson = new Gson ( ) ; try { message = gson . fromJson ( response , TrackGetMessage . class ) ; } catch ( JsonParseException jpe ) { handleErrorResponse ( response ) ; } TrackData data = message . getTrackMessage ( ) . getBody ( ) . getTrack ( ) ; track . setTrack ( data ) ; return track ;
public class QueryBuilder {
    /**
     * Add "GROUP BY" clause to the SQL query statement. This can be called multiple times to add
     * additional "GROUP BY" clauses.
     *
     * <p>NOTE: Use of this means that the resulting objects may not have a valid ID column value
     * so cannot be deleted or updated.
     *
     * @param columnName name of an existing, non-foreign-collection column
     * @return this builder, for call chaining
     * @throws IllegalArgumentException if the column is a foreign collection field
     */
    public QueryBuilder<T, ID> groupBy(String columnName) {
        // verifyColumnName also validates that the column exists on this table.
        FieldType fieldType = verifyColumnName(columnName);
        if (fieldType.isForeignCollection()) {
            // Foreign collections have no single column to group on.
            throw new IllegalArgumentException("Can't groupBy foreign collection field: " + columnName);
        }
        addGroupBy(ColumnNameOrRawSql.withColumnName(columnName));
        return this;
    }
}
public class Util { /** * Returns true if generated code can invoke { @ code constructor } . That is , if * the constructor is non - private and its enclosing class is either a * top - level class or a static nested class . */ public static boolean isCallableConstructor ( ExecutableElement constructor ) { } }
if ( constructor . getModifiers ( ) . contains ( Modifier . PRIVATE ) ) { return false ; } TypeElement type = ( TypeElement ) constructor . getEnclosingElement ( ) ; return type . getEnclosingElement ( ) . getKind ( ) == ElementKind . PACKAGE || type . getModifiers ( ) . contains ( Modifier . STATIC ) ;
public class AppServiceCertificateOrdersInner {
    /**
     * Renew an existing certificate order.
     *
     * @param resourceGroupName name of the resource group to which the resource belongs
     * @param certificateOrderName name of the certificate order
     * @param renewCertificateOrderRequest renew parameters
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> renewAsync(String resourceGroupName, String certificateOrderName,
            RenewCertificateOrderRequest renewCertificateOrderRequest,
            final ServiceCallback<Void> serviceCallback) {
        // Bridges the Observable-based implementation to the callback-based API surface.
        return ServiceFuture.fromResponse(
            renewWithServiceResponseAsync(resourceGroupName, certificateOrderName, renewCertificateOrderRequest),
            serviceCallback);
    }
}
public class SchemaNode {
    // ----- private methods -----
    /**
     * Copies the GraphQL field definitions of this node's parent type into
     * {@code fields}, initializing the parent's GraphQL type on demand.
     * Self-references and blacklisted parents are ignored.
     *
     * @param schemaNodes      all schema nodes, by name
     * @param parentSchemaNode the parent node; may be null
     * @param graphQLTypes     already-built GraphQL types, by name (may be extended)
     * @param fields           output map of field name to field definition
     * @param blacklist        type names to skip entirely
     * @throws FrameworkException if initializing the parent type fails
     */
    private void registerParentType(final Map<String, SchemaNode> schemaNodes,
            final SchemaNode parentSchemaNode, final Map<String, GraphQLType> graphQLTypes,
            final Map<String, GraphQLFieldDefinition> fields, final Set<String> blacklist)
            throws FrameworkException {
        if (parentSchemaNode != null && !parentSchemaNode.equals(this)) {
            final String parentName = parentSchemaNode.getClassName();
            if (parentName != null && !blacklist.contains(parentName)) {
                if (!graphQLTypes.containsKey(parentName)) {
                    // initialize parent type
                    parentSchemaNode.initializeGraphQL(schemaNodes, graphQLTypes, blacklist);
                }
                // second try: add fields from parent type
                if (graphQLTypes.containsKey(parentName)) {
                    final GraphQLObjectType parentType = (GraphQLObjectType) graphQLTypes.get(parentName);
                    if (parentType != null) {
                        // Inherit every parent field; later own-field puts may overwrite these.
                        for (final GraphQLFieldDefinition field : parentType.getFieldDefinitions()) {
                            fields.put(field.getName(), field);
                        }
                    }
                }
            }
        }
    }
}
public class CodeBuilderUtil {
    /**
     * Calls toString on a StringBuilder. A StringBuilder must be on the stack,
     * and a String is left on the stack after the call.
     *
     * @param b the code builder to emit the invocation into
     */
    public static void callStringBuilderToString(CodeBuilder b) {
        // Because of JDK1.5 bug which exposes AbstractStringBuilder class,
        // cannot use reflection to get method signature.
        TypeDesc stringBuilder = TypeDesc.forClass(StringBuilder.class);
        // null parameter list: toString takes no arguments.
        b.invokeVirtual(stringBuilder, "toString", TypeDesc.STRING, null);
    }
}
public class URIClassLoader {
    /**
     * Add specified URL at the end of the search path.
     *
     * @param url the URL to add
     * @throws IllegalArgumentException if the URL's external form is not a valid URI
     *         (URI.create throws unchecked, unlike URL.toURI's checked URISyntaxException)
     */
    @Override
    public void addURL(URL url) {
        this.finder.addURI(URI.create(url.toExternalForm()));
    }
}
public class GlobalConfiguration {
    /**
     * Filters files in directory which have the specified suffix (e.g. ".xml").
     *
     * @param dirToFilter directory to filter
     * @param suffix suffix to filter files by (e.g. ".xml")
     * @return files with given ending in directory
     */
    private static File[] filterFilesBySuffix(final File dirToFilter, final String suffix) {
        // Delegates to the multi-suffix overload with a single-element array.
        return filterFilesBySuffix(dirToFilter, new String[] { suffix });
    }
}
public class HttpIgnoreBodyCallback {
    /**
     * Completion callback used while purging (discarding) an unread inbound HTTP
     * request body: releases each body buffer and keeps reading until the end of
     * the body, then closes the connection link.
     *
     * @see com.ibm.wsspi.channelfw.InterChannelCallback#complete(com.ibm.wsspi.channelfw.VirtualConnection)
     */
    public void complete(VirtualConnection vc) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "complete() called: " + vc);
        }
        // can't do anything with a null VC
        if (null == vc) {
            return;
        }
        Object o = vc.getStateMap().get(CallbackIDs.CALLBACK_HTTPISC);
        if (null == o) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "ERROR: null ISC in complete()");
            }
            return;
        }
        HttpInboundServiceContextImpl sc = (HttpInboundServiceContextImpl) o;
        // need to continue the purge of the request body
        try {
            VirtualConnection rc = null;
            do {
                WsByteBuffer buffer = sc.getRequestBodyBuffer();
                if (null != buffer) {
                    // Discard this chunk and request the next one asynchronously;
                    // a null return means this callback will be invoked again later.
                    buffer.release();
                    rc = sc.getRequestBodyBuffer(this, false);
                } else {
                    // end of body found
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(tc, "Reached end of the body being purged");
                    }
                    sc.getLink().close(vc, null);
                    return;
                }
            } while (null != rc);
        } catch (Exception purgeException) {
            // no FFDC required
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "Exception purging request body: " + purgeException);
            }
            sc.getLink().close(vc, purgeException);
        }
        // if we get here then we're waiting for the next callback usage
    }
}
public class CQLStatementCache { /** * Get the given prepared statement for the given table and query . Upon * first invocation for a given combo , the query is parsed and cached . * @ param tableName Name of table to customize query for . * @ param query Inquiry { @ link Query } . * @ return PreparedStatement for given combo . */ public PreparedStatement getPreparedQuery ( String tableName , Query query ) { } }
synchronized ( m_prepQueryMap ) { Map < Query , PreparedStatement > statementMap = m_prepQueryMap . get ( tableName ) ; if ( statementMap == null ) { statementMap = new HashMap < > ( ) ; m_prepQueryMap . put ( tableName , statementMap ) ; } PreparedStatement prepState = statementMap . get ( query ) ; if ( prepState == null ) { prepState = prepareQuery ( tableName , query ) ; statementMap . put ( query , prepState ) ; } return prepState ; }
public class AppearancePreferenceFragment {
    /**
     * Creates and returns a listener, which allows to adapt the elevation of the toolbar, when the
     * value of the corresponding preference has been changed.
     *
     * @return the listener, which has been created, as an instance of the type
     *         {@link OnPreferenceChangeListener}
     */
    private OnPreferenceChangeListener createToolbarElevationChangeListener() {
        return new OnPreferenceChangeListener() {

            @Override
            public boolean onPreferenceChange(Preference preference, Object newValue) {
                // Preference values arrive as strings; parse to the elevation value.
                int elevation = Integer.valueOf((String) newValue);
                // The hosting activity is expected to be a PreferenceActivity.
                ((PreferenceActivity) getActivity()).setToolbarElevation(elevation);
                return true; // accept the new value so it gets persisted
            }

        };
    }
}
public class CommerceTierPriceEntryUtil {
    /**
     * Returns the first commerce tier price entry in the ordered set where groupId = &#63;.
     *
     * @param groupId the group ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the first matching commerce tier price entry, or <code>null</code> if a matching
     *         commerce tier price entry could not be found
     */
    public static CommerceTierPriceEntry fetchByGroupId_First(long groupId,
            OrderByComparator<CommerceTierPriceEntry> orderByComparator) {
        // Static facade: delegates to the service-locator-provided persistence instance.
        return getPersistence().fetchByGroupId_First(groupId, orderByComparator);
    }
}
public class Tree {
    /**
     * Puts a node with the specified value into the specified path.
     *
     * @param path  path (e.g. "path.to.node[0]")
     * @param value the new value
     * @return container node
     */
    public Tree putObject(String path, Object value) {
        // getNodeValue normalizes the raw value before insertion;
        // 'false' indicates no special put semantics for this variant.
        return putObjectInternal(path, getNodeValue(value), false);
    }
}
public class TypicalLoginAssist { /** * Are the user ID and expire date extracted from cookie valid ? * @ param userKey The key of the login user . ( NotNull ) * @ param expireDate The string expression for expire date of remember - me access token . ( NotNull ) * @ return Is a validation for remember - me OK ? */ protected boolean isValidRememberMeCookie ( String userKey , String expireDate ) { } }
final String currentDate = formatForRememberMeExpireDate ( timeManager . currentHandyDate ( ) ) ; if ( currentDate . compareTo ( expireDate ) < 0 ) { // String v . s . String return true ; // valid access token within time limit } // expired here logger . debug ( "The access token for remember-me expired: userKey={} expireDate={}" , userKey , expireDate ) ; return false ;
public class ImgUtil { /** * 根据文件创建字体 < br > * 首先尝试创建 { @ link Font # TRUETYPE _ FONT } 字体 , 此类字体无效则创建 { @ link Font # TYPE1 _ FONT } * @ param fontStream 字体流 * @ return { @ link Font } * @ since 3.0.9 */ public static Font createFont ( InputStream fontStream ) { } }
try { return Font . createFont ( Font . TRUETYPE_FONT , fontStream ) ; } catch ( FontFormatException e ) { // True Type字体无效时使用Type1字体 try { return Font . createFont ( Font . TYPE1_FONT , fontStream ) ; } catch ( Exception e1 ) { throw new UtilException ( e1 ) ; } } catch ( IOException e ) { throw new IORuntimeException ( e ) ; }
public class Recycler { /** * Returns , if exists , a view of the type < code > typeView < / code > . * @ param itemType the type of view that you want . * @ return a viewHolder of the type < code > typeView < / code > . < code > null < / code > if * not found . */ @ Nullable ViewHolder popRecycledViewHolder ( int itemType ) { } }
Deque < ViewHolder > deque = mViewHolders . get ( itemType ) ; return deque == null || deque . isEmpty ( ) ? null : deque . pop ( ) ;
public class ClassGenerator {
    /**
     * Converts each (normalized) parsed type into a string-based representation
     * of a Java type.
     *
     * @param msgTypes the parsed message types to render
     * @return one Java type string per input element, in input order
     */
    List<String> generateTypes(List<MessageType> msgTypes) {
        // stringVisitor renders a single type; null is the visitor's (unused) context argument.
        return msgTypes.stream().map(t -> t.accept(stringVisitor, null)).collect(Collectors.toList());
    }
}
public class SequenceLabelerCrossValidator { /** * Starts the evaluation . * @ param samples * the data to train and test * @ param nFolds * number of folds * @ throws IOException * if io errors */ public void evaluate ( final ObjectStream < SequenceLabelSample > samples , final int nFolds ) throws IOException { } }
// Note : The name samples need to be grouped on a document basis . final CrossValidationPartitioner < DocumentSample > partitioner = new CrossValidationPartitioner < DocumentSample > ( new NameToDocumentSampleStream ( samples ) , nFolds ) ; while ( partitioner . hasNext ( ) ) { final CrossValidationPartitioner . TrainingSampleStream < DocumentSample > trainingSampleStream = partitioner . next ( ) ; SequenceLabelerModel model = null ; if ( this . factory != null ) { model = SequenceLabelerME . train ( this . languageCode , new DocumentToNameSampleStream ( trainingSampleStream ) , this . params , this . factory ) ; } else { System . err . println ( "You need to implement a SequenceLabelerFactory!" ) ; System . exit ( 1 ) ; } String corpusFormat = Flags . getCorpusFormat ( this . params ) ; // do testing final SequenceLabelerEvaluator evaluator = new SequenceLabelerEvaluator ( corpusFormat , new SequenceLabelerME ( model ) , this . listeners ) ; evaluator . evaluate ( new DocumentToNameSampleStream ( trainingSampleStream . getTestSampleStream ( ) ) ) ; this . fmeasure . mergeInto ( evaluator . getFMeasure ( ) ) ; }
public class PiElectronegativity {
    /**
     * Calculate the electronegativity of orbitals pi.
     *
     * @param ac   IAtomContainer
     * @param atom atom for which effective atom electronegativity should be calculated
     * @return piElectronegativity
     */
    public double calculatePiElectronegativity(IAtomContainer ac, IAtom atom) {
        // Delegates to the full overload using this instance's configured
        // iteration limits (maxI, maxRS).
        return calculatePiElectronegativity(ac, atom, maxI, maxRS);
    }
}
public class DefaultExtensionRepositoryManager {
    /**
     * Resolves the extension by trying each registered {@code ExtensionRepository}
     * in order. A not-found in one repository is normal (debug log, keep looking);
     * an unexpected failure is remembered and, if nothing else succeeds, rethrown
     * as the overall failure cause.
     *
     * @param extensionId the identifier of the extension to resolve
     * @return the resolved extension from the first repository that has it
     * @throws ResolveException if a repository failed unexpectedly and no other
     *         repository provided the extension
     * @throws ExtensionNotFoundException if no repository knows the extension
     */
    @Override
    public Extension resolve(ExtensionId extensionId) throws ResolveException {
        ResolveException lastException = null;
        for (ExtensionRepository repository : this.repositories) {
            try {
                return repository.resolve(extensionId);
            } catch (ExtensionNotFoundException e1) {
                this.logger.debug("Could not find extension [{}] in repository [{}]", extensionId,
                    repository.getDescriptor(), e1);
            } catch (ResolveException e2) {
                this.logger.error("Unexpected error when trying to find extension [{}] in repository [{}]",
                    extensionId, repository.getDescriptor(), e2);
                lastException = e2;
            }
        }
        if (lastException != null) {
            // At least one repository failed unexpectedly: surface that failure as the cause.
            throw new ResolveException(
                MessageFormat.format("Failed to resolve extension [{0}]", extensionId), lastException);
        } else {
            throw new ExtensionNotFoundException(
                MessageFormat.format("Could not find extension [{0}]", extensionId));
        }
    }
}
public class LinkBuilderSupport {
    /**
     * (non-Javadoc)
     * @see org.springframework.hateoas.LinkBuilder#slash(java.lang.Object)
     */
    public T slash(@Nullable Object object) {
        // Unwrap Optional arguments; an empty Optional behaves like null.
        object = object instanceof Optional ? ((Optional<?>) object).orElse(null) : object;
        if (object == null) {
            return getThis();
        }
        String path = object.toString();
        // Strip a trailing fragment marker so "foo#" behaves like "foo".
        if (path.endsWith("#")) {
            path = path.substring(0, path.length() - 1);
        }
        // Blank segments contribute nothing to the URI.
        if (!StringUtils.hasText(path)) {
            return getThis();
        }
        // Ensure exactly one leading slash before appending the segment.
        path = path.startsWith("/") ? path : "/".concat(path);
        return slash(UriComponentsBuilder.fromUriString(path).build(), false);
    }
}
public class CommonOps_ZDRM { /** * Creates a new matrix which is the specified submatrix of ' src ' * s < sub > i - y0 , j - x0 < / sub > = o < sub > ij < / sub > for all y0 & le ; i & lt ; y1 and x0 & le ; j & lt ; x1 < br > * < br > * where ' s < sub > ij < / sub > ' is an element in the submatrix and ' o < sub > ij < / sub > ' is an element in the * original matrix . * @ param src The original matrix which is to be copied . Not modified . * @ param srcX0 Start column . * @ param srcX1 Stop column + 1. * @ param srcY0 Start row . * @ param srcY1 Stop row + 1. * @ return Extracted submatrix . */ public static ZMatrixRMaj extract ( ZMatrixRMaj src , int srcY0 , int srcY1 , int srcX0 , int srcX1 ) { } }
if ( srcY1 <= srcY0 || srcY0 < 0 || srcY1 > src . numRows ) throw new IllegalArgumentException ( "srcY1 <= srcY0 || srcY0 < 0 || srcY1 > src.numRows" ) ; if ( srcX1 <= srcX0 || srcX0 < 0 || srcX1 > src . numCols ) throw new IllegalArgumentException ( "srcX1 <= srcX0 || srcX0 < 0 || srcX1 > src.numCols" ) ; int w = srcX1 - srcX0 ; int h = srcY1 - srcY0 ; ZMatrixRMaj dst = new ZMatrixRMaj ( h , w ) ; extract ( src , srcY0 , srcY1 , srcX0 , srcX1 , dst , 0 , 0 ) ; return dst ;