signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class BaseConvertToMessage { /** * Create the root element for this message .
* You SHOULD override this if the unmarshaller has a native method to unmarshall a dom node .
* @ return The root element . */
public Object unmarshalRootElement ( Node node , BaseXmlTrxMessageIn soapTrxMessage ) throws Exception { } }
|
// Override this ! ( If you can ! )
TransformerFactory tFact = TransformerFactory . newInstance ( ) ; Source source = new DOMSource ( node ) ; Writer writer = new StringWriter ( ) ; Result result = new StreamResult ( writer ) ; Transformer transformer = tFact . newTransformer ( ) ; transformer . transform ( source , result ) ; writer . flush ( ) ; writer . close ( ) ; String strXMLBody = writer . toString ( ) ; Reader inStream = new StringReader ( strXMLBody ) ; Object msg = this . unmarshalRootElement ( inStream , soapTrxMessage ) ; inStream . close ( ) ; return msg ;
|
public class GoogleHadoopFileSystemBase { /** * { @ inheritDoc } */
@ Override public void setXAttr ( Path path , String name , byte [ ] value , EnumSet < XAttrSetFlag > flags ) throws IOException { } }
|
logger . atFine ( ) . log ( "GHFS.setXAttr: %s, %s, %s, %s" , path , name , lazy ( ( ) -> new String ( value , UTF_8 ) ) , flags ) ; checkNotNull ( path , "path should not be null" ) ; checkNotNull ( name , "name should not be null" ) ; checkArgument ( flags != null && ! flags . isEmpty ( ) , "flags should not be null or empty" ) ; FileInfo fileInfo = getGcsFs ( ) . getFileInfo ( getGcsPath ( path ) ) ; String xAttrKey = getXAttrKey ( name ) ; Map < String , byte [ ] > attributes = fileInfo . getAttributes ( ) ; if ( attributes . containsKey ( xAttrKey ) && ! flags . contains ( XAttrSetFlag . REPLACE ) ) { throw new IOException ( String . format ( "REPLACE flag must be set to update XAttr (name='%s', value='%s') for '%s'" , name , new String ( value , UTF_8 ) , path ) ) ; } if ( ! attributes . containsKey ( xAttrKey ) && ! flags . contains ( XAttrSetFlag . CREATE ) ) { throw new IOException ( String . format ( "CREATE flag must be set to create XAttr (name='%s', value='%s') for '%s'" , name , new String ( value , UTF_8 ) , path ) ) ; } UpdatableItemInfo updateInfo = new UpdatableItemInfo ( fileInfo . getItemInfo ( ) . getResourceId ( ) , ImmutableMap . of ( xAttrKey , getXAttrValue ( value ) ) ) ; getGcsFs ( ) . getGcs ( ) . updateItems ( ImmutableList . of ( updateInfo ) ) ; logger . atFine ( ) . log ( "GHFS.setXAttr:=> " ) ;
|
public class EOCImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public boolean eIsSet ( int featureID ) { } }
|
switch ( featureID ) { case AfplibPackage . EOC__OBJ_CNAME : return OBJ_CNAME_EDEFAULT == null ? objCName != null : ! OBJ_CNAME_EDEFAULT . equals ( objCName ) ; case AfplibPackage . EOC__TRIPLETS : return triplets != null && ! triplets . isEmpty ( ) ; } return super . eIsSet ( featureID ) ;
|
public class GenericConnectionFactory { /** * { @ inheritDoc } */
public Connection getJdbcConnection ( boolean readOnly ) throws RepositoryException { } }
|
try { Connection conn = dbDataSource . getConnection ( ) ; if ( readOnly ) { // set this feature only if it asked
conn . setReadOnly ( readOnly ) ; } return conn ; } catch ( SQLException e ) { String err = "Error of JDBC connection open. SQLException: " + e . getMessage ( ) + ", SQLState: " + e . getSQLState ( ) + ", VendorError: " + e . getErrorCode ( ) ; throw new RepositoryException ( err , e ) ; }
|
public class VFS { /** * Create and mount a real file system , returning a single handle which will unmount and close the filesystem when
* closed .
* @ param realRoot the real filesystem root
* @ param mountPoint the point at which the filesystem should be mounted
* @ return a handle
* @ throws IOException if an error occurs */
public static Closeable mountReal ( File realRoot , VirtualFile mountPoint ) throws IOException { } }
|
return doMount ( new RealFileSystem ( realRoot ) , mountPoint ) ;
|
public class Currency { /** * Returns the set of available currencies . The returned set of currencies contains all of the
* available currencies , including obsolete ones . The result set can be modified without
* affecting the available currencies in the runtime .
* @ return The set of available currencies . The returned set could be empty if there is no
* currency data available . */
public static Set < Currency > getAvailableCurrencies ( ) { } }
|
CurrencyMetaInfo info = CurrencyMetaInfo . getInstance ( ) ; List < String > list = info . currencies ( CurrencyFilter . all ( ) ) ; HashSet < Currency > resultSet = new HashSet < Currency > ( list . size ( ) ) ; for ( String code : list ) { resultSet . add ( getInstance ( code ) ) ; } return resultSet ;
|
public class CmsSolrCopyModifiedUpateProcessorFactory { /** * Read the parameters on initialization .
* @ see org . apache . solr . update . processor . UpdateRequestProcessorFactory # init ( org . apache . solr . common . util . NamedList ) */
@ Override public void init ( NamedList args ) { } }
|
Object regex = args . remove ( PARAM_REGEX ) ; if ( null == regex ) { throw new SolrException ( ErrorCode . SERVER_ERROR , "Missing required init parameter: " + PARAM_REGEX ) ; } try { m_regex = Pattern . compile ( regex . toString ( ) ) ; } catch ( PatternSyntaxException e ) { throw new SolrException ( ErrorCode . SERVER_ERROR , "Invalid regex: " + regex , e ) ; } Object replacement = args . remove ( PARAM_REPLACEMENT ) ; if ( null == replacement ) { throw new SolrException ( ErrorCode . SERVER_ERROR , "Missing required init parameter: " + PARAM_REPLACEMENT ) ; } m_replacement = replacement . toString ( ) ; Object source = args . remove ( PARAM_SOURCE ) ; if ( null == source ) { throw new SolrException ( ErrorCode . SERVER_ERROR , "Missing required init parameter: " + PARAM_SOURCE ) ; } m_source = source . toString ( ) ; Object target = args . remove ( PARAM_TARGET ) ; if ( null == target ) { throw new SolrException ( ErrorCode . SERVER_ERROR , "Missing required init parameter: " + PARAM_TARGET ) ; } m_target = target . toString ( ) ;
|
public class AbstractExtensionPropertySerializer { /** * Creates a new { @ link Element } with the given name and text content .
* @ param document the document used to create the new { @ link Element } .
* @ param elementName the name of the element to create
* @ param elementTextContent the text content of the element
* @ return the new element
* @ since 7.0M2 */
protected Element createRootElement ( Document document , String elementName , String elementTextContent ) { } }
|
Element element = createRootElement ( document , elementName ) ; element . setTextContent ( elementTextContent ) ; return element ;
|
public class MessageLogGridScreen { /** * Does the current user have permission to access this screen .
* @ return NORMAL _ RETURN if access is allowed , ACCESS _ DENIED or LOGIN _ REQUIRED otherwise . */
public int checkSecurity ( ) { } }
|
int iErrorCode = super . checkSecurity ( ) ; if ( ( iErrorCode == DBConstants . NORMAL_RETURN ) || ( iErrorCode == Constants . READ_ACCESS ) ) { // Okay , their group can access this screen , but can this user access this data ?
String strUserContactType = this . getProperty ( DBParams . CONTACT_TYPE ) ; String strUserContactID = this . getProperty ( DBParams . CONTACT_ID ) ; String strContactTypeID = this . getScreenRecord ( ) . getField ( MessageLogScreenRecord . CONTACT_TYPE_ID ) . toString ( ) ; if ( ( strContactTypeID == null ) || ( strContactTypeID . length ( ) == 0 ) ) if ( ( strUserContactType != null ) && ( strUserContactType . length ( ) > 0 ) ) { if ( ! Utility . isNumeric ( strUserContactType ) ) { ContactType recContactType = ( ContactType ) ( ( ReferenceField ) this . getScreenRecord ( ) . getField ( MessageLogScreenRecord . CONTACT_TYPE_ID ) ) . getReferenceRecord ( this ) ; strUserContactType = Integer . toString ( recContactType . getIDFromCode ( strUserContactType ) ) ; } this . getScreenRecord ( ) . getField ( MessageLogScreenRecord . CONTACT_TYPE_ID ) . setString ( strContactTypeID = strUserContactType ) ; } String strContactID = this . getScreenRecord ( ) . getField ( MessageLogScreenRecord . CONTACT_ID ) . toString ( ) ; if ( ( strContactID == null ) || ( strContactID . length ( ) == 0 ) ) if ( ( strUserContactID != null ) && ( strUserContactID . length ( ) > 0 ) ) this . getScreenRecord ( ) . getField ( MessageLogScreenRecord . CONTACT_ID ) . setString ( strContactID = strUserContactID ) ; iErrorCode = this . checkContactSecurity ( strContactTypeID , strContactID ) ; } return iErrorCode ;
|
public class Alignments { /** * Return the alignment matches in the specified alignment pair as 0 - based [ closed , open ) ranges .
* @ param alignmentPair alignment pair , must not be null
* @ return the alignment matches in the alignment pair as 0 - based [ closed , open ) ranges */
public static List < Range < Long > > matches ( final AlignmentPair alignmentPair ) { } }
|
checkNotNull ( alignmentPair ) ; List < Range < Long > > matches = new ArrayList < Range < Long > > ( ) ; int matchStart = - 1 ; for ( int i = 1 , length = alignmentPair . length ( ) + 1 ; i < length ; i ++ ) { if ( isMatchSymbol ( alignmentPair . symbolAt ( i ) ) ) { if ( matchStart < 0 ) { matchStart = i ; } } else { if ( matchStart > 0 ) { // biojava coordinates are 1 - based
matches . add ( Range . closedOpen ( Long . valueOf ( matchStart - 1L ) , Long . valueOf ( i - 1L ) ) ) ; matchStart = - 1 ; } } } if ( matchStart > 0 ) { matches . add ( Range . closedOpen ( Long . valueOf ( matchStart - 1L ) , Long . valueOf ( alignmentPair . length ( ) ) ) ) ; } return matches ;
|
public class SearchIndex { /** * Creates a lucene < code > Document < / code > for a node state using the
* namespace mappings < code > nsMappings < / code > .
* @ param node
* the node state to index .
* @ param nsMappings
* the namespace mappings of the search index .
* @ param indexFormatVersion
* the index format version that should be used to index the
* passed node state .
* @ param loadAllProperties
* Indicates whether all the properties should be loaded using the method
* { @ link ItemDataConsumer # getChildPropertiesData ( org . exoplatform . services . jcr . datamodel . NodeData ) }
* @ return a lucene < code > Document < / code > that contains all properties of
* < code > node < / code > .
* @ throws RepositoryException
* if an error occurs while indexing the < code > node < / code > . */
protected Document createDocument ( NodeDataIndexing node , NamespaceMappings nsMappings , IndexFormatVersion indexFormatVersion , boolean loadAllProperties , VolatileIndex volatileIndex ) throws RepositoryException { } }
|
NodeIndexer indexer = new NodeIndexer ( node , getContext ( ) . getItemStateManager ( ) , nsMappings , extractor ) ; indexer . setSupportHighlighting ( supportHighlighting ) ; indexer . setIndexingConfiguration ( indexingConfig ) ; indexer . setIndexFormatVersion ( indexFormatVersion ) ; indexer . setLoadBatchingThreshold ( indexingLoadBatchingThresholdProperty ) ; indexer . setLoadPropertyByName ( indexingLoadPropertyByName ) ; indexer . setLoadAllProperties ( loadAllProperties ) ; Document doc = indexer . createDoc ( ) ; mergeAggregatedNodeIndexes ( node , doc , loadAllProperties , volatileIndex ) ; return doc ;
|
public class DialogResponseAPDUImpl { /** * ( non - Javadoc )
* @ see org . restcomm . protocols . ss7 . tcap . asn . Encodable # encode ( org . mobicents . protocols . asn . AsnOutputStream ) */
public void encode ( AsnOutputStream aos ) throws EncodeException { } }
|
if ( acn == null ) throw new EncodeException ( "Error encoding DialogResponseAPDU: Application Context Name must not be null" ) ; if ( result == null ) throw new EncodeException ( "Error encoding DialogResponseAPDU: Result must not be null" ) ; if ( diagnostic == null ) throw new EncodeException ( "Error encoding DialogResponseAPDU: Result-source-diagnostic must not be null" ) ; try { aos . writeTag ( Tag . CLASS_APPLICATION , false , _TAG_RESPONSE ) ; int pos = aos . StartContentDefiniteLength ( ) ; if ( ! doNotSendProtocolVersion ) this . protocolVersion . encode ( aos ) ; this . acn . encode ( aos ) ; this . result . encode ( aos ) ; this . diagnostic . encode ( aos ) ; if ( ui != null ) ui . encode ( aos ) ; aos . FinalizeContent ( pos ) ; } catch ( AsnException e ) { throw new EncodeException ( "AsnException while encoding DialogResponseAPDU: " + e . getMessage ( ) , e ) ; }
|
public class CouchbaseConnection { /** * Get the major version of Couchbase server , that is 4 . x or 5 . x
* @ return */
protected int getMajorVersion ( ) { } }
|
return this . cluster . authenticate ( this . passwordAuthenticator ) . clusterManager ( ) . info ( 5 , TimeUnit . SECONDS ) . getMinVersion ( ) . major ( ) ;
|
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link MactionType } { @ code > } } */
@ XmlElementDecl ( namespace = "http://www.w3.org/1998/Math/MathML" , name = "maction" ) public JAXBElement < MactionType > createMaction ( MactionType value ) { } }
|
return new JAXBElement < MactionType > ( _Maction_QNAME , MactionType . class , null , value ) ;
|
public class LogPublishingOptionsStatus { /** * The log publishing options configured for the Elasticsearch domain .
* @ param options
* The log publishing options configured for the Elasticsearch domain .
* @ return Returns a reference to this object so that method calls can be chained together . */
public LogPublishingOptionsStatus withOptions ( java . util . Map < String , LogPublishingOption > options ) { } }
|
setOptions ( options ) ; return this ;
|
public class MultiColumnRegexFilter { /** * Clear all text fields in this instance */
public void clearAll ( ) { } }
|
for ( int i = 0 ; i < textFields . size ( ) ; i ++ ) { JTextField textField = textFields . get ( i ) ; if ( textField == null ) { continue ; } textField . setText ( "" ) ; }
|
public class AtomContainerManipulator { /** * Suppress any explicit hydrogens in the provided container . Only hydrogens
* that can be represented as a hydrogen count value on the atom are
* suppressed . The container is updated and no elements are copied , please
* use either { @ link # copyAndSuppressedHydrogens } if you would to preserve
* the old instance .
* @ param org the container from which to remove hydrogens
* @ return the input for convenience
* @ see # copyAndSuppressedHydrogens */
public static IAtomContainer suppressHydrogens ( IAtomContainer org ) { } }
|
boolean anyHydrogenPresent = false ; for ( IAtom atom : org . atoms ( ) ) { if ( "H" . equals ( atom . getSymbol ( ) ) ) { anyHydrogenPresent = true ; break ; } } if ( ! anyHydrogenPresent ) return org ; // crossing atoms , positional variation atoms etc
Set < IAtom > xatoms = Collections . emptySet ( ) ; Collection < Sgroup > sgroups = org . getProperty ( CDKConstants . CTAB_SGROUPS ) ; if ( sgroups != null ) { xatoms = new HashSet < > ( ) ; for ( Sgroup sgroup : sgroups ) { for ( IBond bond : sgroup . getBonds ( ) ) { xatoms . add ( bond . getBegin ( ) ) ; xatoms . add ( bond . getEnd ( ) ) ; } } } // we need fast adjacency checks ( to check for suppression and
// update hydrogen counts )
GraphUtil . EdgeToBondMap bondmap = GraphUtil . EdgeToBondMap . withSpaceFor ( org ) ; final int [ ] [ ] graph = GraphUtil . toAdjList ( org , bondmap ) ; final int nOrgAtoms = org . getAtomCount ( ) ; final int nOrgBonds = org . getBondCount ( ) ; int nCpyAtoms = 0 ; int nCpyBonds = 0 ; final Set < IAtom > hydrogens = new HashSet < IAtom > ( nOrgAtoms ) ; final Set < IBond > bondsToHydrogens = new HashSet < IBond > ( ) ; final IAtom [ ] cpyAtoms = new IAtom [ nOrgAtoms ] ; // filter the original container atoms for those that can / can ' t
// be suppressed
for ( int v = 0 ; v < nOrgAtoms ; v ++ ) { final IAtom atom = org . getAtom ( v ) ; if ( suppressibleHydrogen ( org , graph , bondmap , v ) && ! xatoms . contains ( atom ) ) { hydrogens . add ( atom ) ; incrementImplHydrogenCount ( org . getAtom ( graph [ v ] [ 0 ] ) ) ; } else { cpyAtoms [ nCpyAtoms ++ ] = atom ; } } // none of the hydrogens could be suppressed - no changes need to be made
if ( hydrogens . isEmpty ( ) ) return org ; // we now update the bonds - we have auxiliary variable remaining that
// bypasses the set membership checks if all suppressed bonds are found
IBond [ ] cpyBonds = new IBond [ nOrgBonds - hydrogens . size ( ) ] ; int remaining = hydrogens . size ( ) ; for ( final IBond bond : org . bonds ( ) ) { if ( remaining > 0 && ( hydrogens . contains ( bond . getBegin ( ) ) || hydrogens . contains ( bond . getEnd ( ) ) ) ) { bondsToHydrogens . add ( bond ) ; remaining -- ; continue ; } cpyBonds [ nCpyBonds ++ ] = bond ; } // we know how many hydrogens we removed and we should have removed the
// same number of bonds otherwise the containers is a strange
if ( nCpyBonds != cpyBonds . length ) throw new IllegalArgumentException ( "number of removed bonds was less than the number of removed hydrogens" ) ; List < IStereoElement > elements = new ArrayList < IStereoElement > ( ) ; for ( IStereoElement se : org . stereoElements ( ) ) { if ( se instanceof ITetrahedralChirality ) { ITetrahedralChirality tc = ( ITetrahedralChirality ) se ; IAtom focus = tc . getChiralAtom ( ) ; IAtom [ ] neighbors = tc . getLigands ( ) ; boolean updated = false ; for ( int i = 0 ; i < neighbors . length ; i ++ ) { if ( hydrogens . contains ( neighbors [ i ] ) ) { neighbors [ i ] = focus ; updated = true ; } } // no changes
if ( ! updated ) { elements . add ( tc ) ; } else { elements . add ( new TetrahedralChirality ( focus , neighbors , tc . getStereo ( ) ) ) ; } } else if ( se instanceof ExtendedTetrahedral ) { ExtendedTetrahedral tc = ( ExtendedTetrahedral ) se ; IAtom focus = tc . getFocus ( ) ; IAtom [ ] carriers = tc . getCarriers ( ) . toArray ( new IAtom [ 4 ] ) ; IAtom [ ] ends = ExtendedTetrahedral . findTerminalAtoms ( org , focus ) ; boolean updated = false ; for ( int i = 0 ; i < carriers . length ; i ++ ) { if ( hydrogens . contains ( carriers [ i ] ) ) { if ( org . getBond ( carriers [ i ] , ends [ 0 ] ) != null ) carriers [ i ] = ends [ 0 ] ; else carriers [ i ] = ends [ 1 ] ; updated = true ; } } // no changes
if ( ! updated ) { elements . add ( tc ) ; } else { elements . add ( new ExtendedTetrahedral ( focus , carriers , tc . getConfigOrder ( ) ) ) ; } } else if ( se instanceof IDoubleBondStereochemistry ) { IDoubleBondStereochemistry db = ( IDoubleBondStereochemistry ) se ; Conformation conformation = db . getStereo ( ) ; IBond orgStereo = db . getStereoBond ( ) ; IBond orgLeft = db . getBonds ( ) [ 0 ] ; IBond orgRight = db . getBonds ( ) [ 1 ] ; // we use the following variable names to refer to the
// double bond atoms and substituents
// x y
// u = v
IAtom u = orgStereo . getBegin ( ) ; IAtom v = orgStereo . getEnd ( ) ; IAtom x = orgLeft . getOther ( u ) ; IAtom y = orgRight . getOther ( v ) ; // if xNew = = x and yNew = = y we don ' t need to find the
// connecting bonds
IAtom xNew = x ; IAtom yNew = y ; if ( hydrogens . contains ( x ) ) { conformation = conformation . invert ( ) ; xNew = findSingleBond ( org , u , x ) ; } if ( hydrogens . contains ( y ) ) { conformation = conformation . invert ( ) ; yNew = findSingleBond ( org , v , y ) ; } // no other atoms connected , invalid double - bond configuration
// is removed . example [ 2H ] / C = C / [ H ]
if ( x == null || y == null || xNew == null || yNew == null ) continue ; // no changes
if ( x . equals ( xNew ) && y . equals ( yNew ) ) { elements . add ( db ) ; continue ; } // XXX : may perform slow operations but works for now
IBond cpyLeft = ! Objects . equals ( xNew , x ) ? org . getBond ( u , xNew ) : orgLeft ; IBond cpyRight = ! Objects . equals ( yNew , y ) ? org . getBond ( v , yNew ) : orgRight ; elements . add ( new DoubleBondStereochemistry ( orgStereo , new IBond [ ] { cpyLeft , cpyRight } , conformation ) ) ; } else if ( se instanceof ExtendedCisTrans ) { ExtendedCisTrans db = ( ExtendedCisTrans ) se ; int config = db . getConfigOrder ( ) ; IBond focus = db . getFocus ( ) ; IBond orgLeft = db . getCarriers ( ) . get ( 0 ) ; IBond orgRight = db . getCarriers ( ) . get ( 1 ) ; // we use the following variable names to refer to the
// extended double bond atoms and substituents
// x y
// u = = = v
IAtom [ ] ends = ExtendedCisTrans . findTerminalAtoms ( org , focus ) ; IAtom u = ends [ 0 ] ; IAtom v = ends [ 1 ] ; IAtom x = orgLeft . getOther ( u ) ; IAtom y = orgRight . getOther ( v ) ; // if xNew = = x and yNew = = y we don ' t need to find the
// connecting bonds
IAtom xNew = x ; IAtom yNew = y ; if ( hydrogens . contains ( x ) ) { config ^= 0x3 ; xNew = findSingleBond ( org , u , x ) ; } if ( hydrogens . contains ( y ) ) { config ^= 0x3 ; yNew = findSingleBond ( org , v , y ) ; } // no other atoms connected , invalid double - bond configuration
// is removed . example [ 2H ] / C = C / [ H ]
if ( x == null || y == null || xNew == null || yNew == null ) continue ; // no changes
if ( x . equals ( xNew ) && y . equals ( yNew ) ) { elements . add ( db ) ; continue ; } // XXX : may perform slow operations but works for now
IBond cpyLeft = ! Objects . equals ( xNew , x ) ? org . getBond ( u , xNew ) : orgLeft ; IBond cpyRight = ! Objects . equals ( yNew , y ) ? org . getBond ( v , yNew ) : orgRight ; elements . add ( new ExtendedCisTrans ( focus , new IBond [ ] { cpyLeft , cpyRight } , config ) ) ; } else if ( se instanceof Atropisomeric ) { // can not have any H ' s
elements . add ( se ) ; } } org . setAtoms ( Arrays . copyOf ( cpyAtoms , nCpyAtoms ) ) ; org . setBonds ( cpyBonds ) ; org . setStereoElements ( elements ) ; // single electron and lone pairs are not really used but we update
// them just in - case but we just use the inefficient AtomContainer
// methods
if ( org . getSingleElectronCount ( ) > 0 ) { Set < ISingleElectron > remove = new HashSet < ISingleElectron > ( ) ; for ( ISingleElectron se : org . singleElectrons ( ) ) { if ( hydrogens . contains ( se . getAtom ( ) ) ) remove . add ( se ) ; } for ( ISingleElectron se : remove ) { org . removeSingleElectron ( se ) ; } } if ( org . getLonePairCount ( ) > 0 ) { Set < ILonePair > remove = new HashSet < ILonePair > ( ) ; for ( ILonePair lp : org . lonePairs ( ) ) { if ( hydrogens . contains ( lp . getAtom ( ) ) ) remove . add ( lp ) ; } for ( ILonePair lp : remove ) { org . removeLonePair ( lp ) ; } } if ( sgroups != null ) { for ( Sgroup sgroup : sgroups ) { if ( sgroup . getValue ( SgroupKey . CtabParentAtomList ) != null ) { Collection < IAtom > pal = sgroup . getValue ( SgroupKey . CtabParentAtomList ) ; pal . removeAll ( hydrogens ) ; } for ( IAtom hydrogen : hydrogens ) sgroup . removeAtom ( hydrogen ) ; for ( IBond bondToHydrogen : bondsToHydrogens ) sgroup . removeBond ( bondToHydrogen ) ; } } return org ;
|
public class MCWrapper { /** * Marks this connection for destruction . Used as part of purging the entire
* connection pool . Called on connections which are in use at the time of
* the pool being purged .
* If this object is marked stale when cleanup ( ) is called , a call to destroy ( ) will
* happen under the covers . */
@ Override public void markStale ( ) { } }
|
final boolean isTracingEnabled = TraceComponent . isAnyTracingEnabled ( ) ; if ( isTracingEnabled && tc . isEntryEnabled ( ) ) { Tr . entry ( this , tc , "markStale" ) ; } // This update must be thread safe . I ' m assuming here that a boolean assignment
// is atomic . If that ' s incorrect , then we ' ll need to add a synchronize ( stale )
// gaurd here and on the isStale method .
stale = true ; if ( isTracingEnabled && tc . isEntryEnabled ( ) ) { Tr . exit ( this , tc , "markStale" ) ; }
|
public class DefaultPersistenceDelegate { /** * Returns an expression that represents a call to the bean ' s constructor . The constructor may take zero or more parameters , as specified when this
* < code > DefaultPersistenceDelegate < / code > is constructed .
* @ param oldInstance
* the old instance
* @ param enc
* the encoder that wants to record the old instance
* @ return an expression for instantiating an object of the same type as the old instance */
@ Override protected Expression instantiate ( Object oldInstance , Encoder enc ) { } }
|
Object [ ] args = null ; // Set the constructor arguments if any property names exist
if ( this . propertyNames . length > 0 ) { // Prepare the property descriptors for finding getter method later
BeanInfo info = null ; HashMap < String , PropertyDescriptor > proDscMap = null ; try { info = Introspector . getBeanInfo ( oldInstance . getClass ( ) , Introspector . IGNORE_ALL_BEANINFO ) ; proDscMap = internalAsMap ( info . getPropertyDescriptors ( ) ) ; } catch ( IntrospectionException ex ) { enc . getExceptionListener ( ) . exceptionThrown ( ex ) ; throw new Error ( ex ) ; } // Get the arguments values
args = new Object [ this . propertyNames . length ] ; for ( int i = 0 ; i < this . propertyNames . length ; i ++ ) { String propertyName = propertyNames [ i ] ; if ( null == propertyName || 0 == propertyName . length ( ) ) { continue ; } // Get the value for each property of the given instance
try { args [ i ] = getPropertyValue ( proDscMap , oldInstance , this . propertyNames [ i ] ) ; } catch ( Exception ex ) { enc . getExceptionListener ( ) . exceptionThrown ( ex ) ; } } } return new Expression ( oldInstance , oldInstance . getClass ( ) , BeansUtils . NEW , args ) ;
|
public class ManagedClustersInner { /** * Reset AAD Profile of a managed cluster .
* Update the AAD Profile for a managed cluster .
* @ param resourceGroupName The name of the resource group .
* @ param resourceName The name of the managed cluster resource .
* @ param parameters Parameters supplied to the Reset AAD Profile operation for a Managed Cluster .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceResponse } object if successful . */
public Observable < ServiceResponse < Void > > beginResetAADProfileWithServiceResponseAsync ( String resourceGroupName , String resourceName , ManagedClusterAADProfile parameters ) { } }
|
if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( resourceName == null ) { throw new IllegalArgumentException ( "Parameter resourceName is required and cannot be null." ) ; } if ( parameters == null ) { throw new IllegalArgumentException ( "Parameter parameters is required and cannot be null." ) ; } Validator . validate ( parameters ) ; final String apiVersion = "2019-02-01" ; return service . beginResetAADProfile ( this . client . subscriptionId ( ) , resourceGroupName , resourceName , apiVersion , parameters , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < Void > > > ( ) { @ Override public Observable < ServiceResponse < Void > > call ( Response < ResponseBody > response ) { try { ServiceResponse < Void > clientResponse = beginResetAADProfileDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
|
public class DeviceAppearanceImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public void eSet ( int featureID , Object newValue ) { } }
|
switch ( featureID ) { case AfplibPackage . DEVICE_APPEARANCE__DEV_APP : setDevApp ( ( Integer ) newValue ) ; return ; case AfplibPackage . DEVICE_APPEARANCE__RESERVED : setReserved ( ( byte [ ] ) newValue ) ; return ; } super . eSet ( featureID , newValue ) ;
|
public class DescribeMaintenanceWindowTasksResult { /** * Information about the tasks in the Maintenance Window .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setTasks ( java . util . Collection ) } or { @ link # withTasks ( java . util . Collection ) } if you want to override the
* existing values .
* @ param tasks
* Information about the tasks in the Maintenance Window .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DescribeMaintenanceWindowTasksResult withTasks ( MaintenanceWindowTask ... tasks ) { } }
|
if ( this . tasks == null ) { setTasks ( new com . amazonaws . internal . SdkInternalList < MaintenanceWindowTask > ( tasks . length ) ) ; } for ( MaintenanceWindowTask ele : tasks ) { this . tasks . add ( ele ) ; } return this ;
|
public class CmsDefaultXmlContentHandler { /** * The errorHandler parameter is optional , if < code > null < / code > is given a new error handler
* instance must be created . < p >
* @ param cms the current OpenCms user context
* @ param value the value to resolve the validation rules for
* @ param errorHandler ( optional ) an error handler instance that contains previous error or warnings
* @ return an error handler that contains all errors and warnings currently found */
protected CmsXmlContentErrorHandler validateCategories ( CmsObject cms , I_CmsXmlContentValue value , CmsXmlContentErrorHandler errorHandler ) { } }
|
if ( ! value . isSimpleType ( ) ) { // do not validate complex types
return errorHandler ; } I_CmsWidget widget = null ; widget = CmsWidgetUtil . collectWidgetInfo ( value ) . getWidget ( ) ; if ( ! ( widget instanceof CmsCategoryWidget ) ) { // do not validate widget that are not category widgets
return errorHandler ; } String stringValue = value . getStringValue ( cms ) ; if ( stringValue . isEmpty ( ) ) { return errorHandler ; } try { String [ ] values = stringValue . split ( "," ) ; for ( int i = 0 ; i < values . length ; i ++ ) { String val = values [ i ] ; String catPath = CmsCategoryService . getInstance ( ) . getCategory ( cms , val ) . getPath ( ) ; String refPath = getReferencePath ( cms , value ) ; CmsCategoryService . getInstance ( ) . readCategory ( cms , catPath , refPath ) ; if ( ( ( CmsCategoryWidget ) widget ) . isOnlyLeafs ( ) ) { if ( ! CmsCategoryService . getInstance ( ) . readCategories ( cms , catPath , false , refPath ) . isEmpty ( ) ) { errorHandler . addError ( value , Messages . get ( ) . getBundle ( value . getLocale ( ) ) . key ( Messages . GUI_CATEGORY_CHECK_NOLEAF_ERROR_0 ) ) ; } } } } catch ( CmsDataAccessException e ) { // expected error in case of empty / invalid value
// see CmsCategory # getCategoryPath ( String , String )
if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( e . getLocalizedMessage ( ) , e ) ; } errorHandler . addError ( value , Messages . get ( ) . getBundle ( value . getLocale ( ) ) . key ( Messages . GUI_CATEGORY_CHECK_EMPTY_ERROR_0 ) ) ; } catch ( CmsException e ) { // unexpected error
if ( LOG . isErrorEnabled ( ) ) { LOG . error ( e . getLocalizedMessage ( ) , e ) ; } errorHandler . addError ( value , e . getLocalizedMessage ( ) ) ; } return errorHandler ;
|
public class Label { /** * Parses the expression into a label expression tree .
* TODO : replace this with a real parser later */
public static Label parseExpression ( String labelExpression ) throws ANTLRException { } }
|
LabelExpressionLexer lexer = new LabelExpressionLexer ( new StringReader ( labelExpression ) ) ; return new LabelExpressionParser ( lexer ) . expr ( ) ;
|
public class ReflectionUtil { /** * Replies the type that corresponds to the specified class .
* If the name corresponds to a primitive type , the low - level type
* will be replied .
* This method extends
* { @ link Class # forName ( String ) } with autoboxing support .
* @ param name is the name of the class to load .
* @ return the loaded class
* @ throws ClassNotFoundException if name names an
* unknown class or primitive */
@ Pure @ SuppressWarnings ( { } }
|
"checkstyle:returncount" , "checkstyle:npathcomplexity" } ) public static Class < ? > forName ( String name ) throws ClassNotFoundException { if ( name == null || "" . equals ( name ) || "null" . equals ( name ) // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $
|| "void" . equals ( name ) ) { // $ NON - NLS - 1 $
return void . class ; } if ( "boolean" . equals ( name ) ) { // $ NON - NLS - 1 $
return boolean . class ; } if ( "byte" . equals ( name ) ) { // $ NON - NLS - 1 $
return byte . class ; } if ( "char" . equals ( name ) ) { // $ NON - NLS - 1 $
return char . class ; } if ( "double" . equals ( name ) ) { // $ NON - NLS - 1 $
return double . class ; } if ( "float" . equals ( name ) ) { // $ NON - NLS - 1 $
return float . class ; } if ( "int" . equals ( name ) ) { // $ NON - NLS - 1 $
return int . class ; } if ( "long" . equals ( name ) ) { // $ NON - NLS - 1 $
return long . class ; } if ( "short" . equals ( name ) ) { // $ NON - NLS - 1 $
return short . class ; } return Class . forName ( name ) ;
|
public class Utils {
    /**
     * Returns the index of the minimum element in a given array of doubles.
     * The first occurrence of the minimum is returned; for an empty array the
     * result is 0.
     *
     * @param doubles the array of doubles
     * @return the index of the minimum element
     */
    public static /*@pure@*/ int minIndex(double[] doubles) {
        // Track the index of the smallest value seen so far; strict '<'
        // keeps the earliest index on ties.
        int best = 0;
        for (int i = 1; i < doubles.length; i++) {
            if (doubles[i] < doubles[best]) {
                best = i;
            }
        }
        return best;
    }
}
|
public class GitHubServerConfig { /** * Tries to find { @ link StringCredentials } by id and returns secret from it .
* @ param credentialsId id to find creds
* @ return secret from creds or empty optional */
@ Nonnull public static Optional < Secret > secretFor ( String credentialsId ) { } }
|
List < StringCredentials > creds = filter ( lookupCredentials ( StringCredentials . class , Jenkins . getInstance ( ) , ACL . SYSTEM , Collections . < DomainRequirement > emptyList ( ) ) , withId ( trimToEmpty ( credentialsId ) ) ) ; return FluentIterableWrapper . from ( creds ) . transform ( new NullSafeFunction < StringCredentials , Secret > ( ) { @ Override protected Secret applyNullSafe ( @ Nonnull StringCredentials input ) { return input . getSecret ( ) ; } } ) . first ( ) ;
|
public class Builder { /** * Adds { @ code cachePrefix } to the key . This ensures there are no cache
* collisions if the same EVCache app is used across multiple use cases .
* If the cache is not shared we recommend to set this to
* < code > null < / code > . Default is < code > null < / code > .
* @ param cacheName .
* The cache prefix cannot contain colon ( ' : ' ) in it .
* @ return this { @ code Builder } object */
public Builder setCachePrefix ( String cachePrefix ) { } }
|
this . _cachePrefix = cachePrefix ; if ( _cachePrefix != null && _cachePrefix . indexOf ( ':' ) != - 1 ) throw new IllegalArgumentException ( "param cacheName cannot contain ':' character." ) ; return this ;
|
public class QueryLifecycle { /** * Emit logs and metrics for this query .
* @ param e exception that occurred while processing this query
* @ param remoteAddress remote address , for logging ; or null if unknown
* @ param bytesWritten number of bytes written ; will become a query / bytes metric if > = 0 */
@ SuppressWarnings ( "unchecked" ) public void emitLogsAndMetrics ( @ Nullable final Throwable e , @ Nullable final String remoteAddress , final long bytesWritten ) { } }
|
// Guard: a lifecycle that was never initialized has nothing to report. Calling
// this twice only logs a warning — the method still proceeds and re-emits.
// After the guard, success is defined as "no throwable was passed in"; metrics
// (query time, bytes, identity) are emitted first, then the request log entry
// is written with the same stats. Any failure while logging is caught and
// logged so it cannot propagate to the caller.
if ( baseQuery == null ) { // Never initialized , don ' t log or emit anything .
return ; } if ( state == State . DONE ) { log . warn ( "Tried to emit logs and metrics twice for query[%s]!" , baseQuery . getId ( ) ) ; } state = State . DONE ; final boolean success = e == null ; try { final long queryTimeNs = System . nanoTime ( ) - startNs ; QueryMetrics queryMetrics = DruidMetrics . makeRequestMetrics ( queryMetricsFactory , toolChest , baseQuery , StringUtils . nullToEmptyNonDruidDataString ( remoteAddress ) ) ; queryMetrics . success ( success ) ; queryMetrics . reportQueryTime ( queryTimeNs ) ; if ( bytesWritten >= 0 ) { queryMetrics . reportQueryBytes ( bytesWritten ) ; } if ( authenticationResult != null ) { queryMetrics . identity ( authenticationResult . getIdentity ( ) ) ; } queryMetrics . emit ( emitter ) ; final Map < String , Object > statsMap = new LinkedHashMap < > ( ) ; statsMap . put ( "query/time" , TimeUnit . NANOSECONDS . toMillis ( queryTimeNs ) ) ; statsMap . put ( "query/bytes" , bytesWritten ) ; statsMap . put ( "success" , success ) ; if ( authenticationResult != null ) { statsMap . put ( "identity" , authenticationResult . getIdentity ( ) ) ; } if ( e != null ) { statsMap . put ( "exception" , e . toString ( ) ) ; if ( e instanceof QueryInterruptedException ) { // Mimic behavior from QueryResource , where this code was originally taken from .
log . warn ( e , "Exception while processing queryId [%s]" , baseQuery . getId ( ) ) ; statsMap . put ( "interrupted" , true ) ; statsMap . put ( "reason" , e . toString ( ) ) ; } } requestLogger . logNativeQuery ( RequestLogLine . forNative ( baseQuery , DateTimes . utc ( startMs ) , StringUtils . nullToEmptyNonDruidDataString ( remoteAddress ) , new QueryStats ( statsMap ) ) ) ; } catch ( Exception ex ) { log . error ( ex , "Unable to log query [%s]!" , baseQuery ) ; }
|
public class RealmTableImpl { /** * ( non - Javadoc )
* @ see org . jdiameter . client . api . controller . IRealmTable # matchRealm ( org . jdiameter . client . api . IAnswer , java . lang . String ) */
@ Override public Realm matchRealm ( IAnswer message , String destRealm ) { } }
|
return this . matchRealm ( ( IMessage ) message , destRealm ) ;
|
public class OperaSettings { /** * Sets the X display to use . Only works on GNU / Linux . Typically this allows you to open Opera
* in an X virtual framebuffer .
* @ param display the X display to use
* @ throws UnsupportedOperationException if on a non - GNU / Linux operating system */
public void setDisplay ( int display ) { } }
|
if ( ! Platform . getCurrent ( ) . is ( LINUX ) ) { throw new UnsupportedOperationException ( "Unsupported platform: " + Platform . getCurrent ( ) ) ; } options . get ( DISPLAY ) . setValue ( display ) ;
|
public class AbderaAtomFeedProvider { /** * TODO : Implement properly
* We require the request URL
* @ param matchingResults
* @ return */
public static Feed generateFeed ( String request , String matcherDetails , Map < URI , MatchResult > matchingResults ) { } }
|
Feed feed = initialiseFeed ( request , matcherDetails ) ; // Return empty feed if null
if ( matchingResults == null ) { return feed ; } int numResults = matchingResults . size ( ) ; log . debug ( matchingResults . keySet ( ) . toString ( ) ) ; Set < Map . Entry < URI , MatchResult > > entries = matchingResults . entrySet ( ) ; for ( Map . Entry < URI , MatchResult > entry : entries ) { Entry rssEntry = createMatchResultEntry ( entry . getKey ( ) , entry . getValue ( ) ) ; feed . addEntry ( rssEntry ) ; } return feed ;
|
public class Years { /** * Subtracts this amount from the specified temporal object .
* This returns a temporal object of the same observable type as the input
* with this amount subtracted .
* In most cases , it is clearer to reverse the calling pattern by using
* { @ link Temporal # minus ( TemporalAmount ) } .
* < pre >
* / / these two lines are equivalent , but the second approach is recommended
* dateTime = thisAmount . subtractFrom ( dateTime ) ;
* dateTime = dateTime . minus ( thisAmount ) ;
* < / pre >
* Only non - zero amounts will be subtracted .
* This instance is immutable and unaffected by this method call .
* @ param temporal the temporal object to adjust , not null
* @ return an object of the same type with the adjustment made , not null
* @ throws DateTimeException if unable to subtract
* @ throws UnsupportedTemporalTypeException if the YEARS unit is not supported
* @ throws ArithmeticException if numeric overflow occurs */
@ Override public Temporal subtractFrom ( Temporal temporal ) { } }
|
if ( years != 0 ) { temporal = temporal . minus ( years , YEARS ) ; } return temporal ;
|
public class Async { /** * Sends a non-expiring { @ link TextMessage } with average priority .
* @ param queueName name of queue
* @ param text body of message */
public void sendTextMessage ( String queueName , String text ) { } }
|
// Delegates to the full overload with the JMS defaults: non-persistent delivery,
// priority 4 (the JMS "average" priority) and time-to-live 0 (never expires).
sendTextMessage ( queueName , text , DeliveryMode . NON_PERSISTENT , 4 , 0 ) ;
|
public class OssIndexAnalyzer { /** * Transform OSS Index component - report to ODC vulnerability . */
private Vulnerability transform ( final ComponentReport report , final ComponentReportVulnerability source ) { } }
|
Vulnerability result = new Vulnerability ( ) ; result . setSource ( Vulnerability . Source . OSSINDEX ) ; if ( source . getCve ( ) != null ) { result . setName ( source . getCve ( ) ) ; } else { String cve = null ; if ( source . getTitle ( ) != null ) { Matcher matcher = CVE_PATTERN . matcher ( source . getTitle ( ) ) ; if ( matcher . find ( ) ) { cve = matcher . group ( ) ; } else { cve = source . getTitle ( ) ; } } if ( cve == null && source . getReference ( ) != null ) { Matcher matcher = CVE_PATTERN . matcher ( source . getReference ( ) . toString ( ) ) ; if ( matcher . find ( ) ) { cve = matcher . group ( ) ; } } result . setName ( cve != null ? cve : source . getId ( ) ) ; } result . setDescription ( source . getDescription ( ) ) ; result . addCwe ( source . getCwe ( ) ) ; float cvssScore = source . getCvssScore ( ) != null ? source . getCvssScore ( ) : - 1 ; if ( source . getCvssVector ( ) != null ) { // convert cvss details
CvssVector cvssVector = CvssVectorFactory . create ( source . getCvssVector ( ) ) ; Map < String , String > metrics = cvssVector . getMetrics ( ) ; if ( cvssVector instanceof Cvss2Vector ) { result . setCvssV2 ( new CvssV2 ( cvssScore , metrics . get ( Cvss2Vector . ACCESS_VECTOR ) , metrics . get ( Cvss2Vector . ACCESS_COMPLEXITY ) , metrics . get ( Cvss2Vector . AUTHENTICATION ) , metrics . get ( Cvss2Vector . CONFIDENTIALITY_IMPACT ) , metrics . get ( Cvss2Vector . INTEGRITY_IMPACT ) , metrics . get ( Cvss2Vector . AVAILABILITY_IMPACT ) , Cvss2Severity . of ( cvssScore ) . name ( ) ) ) ; } else if ( cvssVector instanceof Cvss3Vector ) { result . setCvssV3 ( new CvssV3 ( metrics . get ( Cvss3Vector . ATTACK_VECTOR ) , metrics . get ( Cvss3Vector . ATTACK_COMPLEXITY ) , metrics . get ( Cvss3Vector . PRIVILEGES_REQUIRED ) , metrics . get ( Cvss3Vector . USER_INTERACTION ) , metrics . get ( Cvss3Vector . SCOPE ) , metrics . get ( Cvss3Vector . CONFIDENTIALITY_IMPACT ) , metrics . get ( Cvss3Vector . INTEGRITY_IMPACT ) , metrics . get ( Cvss3Vector . AVAILABILITY_IMPACT ) , cvssScore , Cvss3Severity . of ( cvssScore ) . name ( ) ) ) ; } else { log . warn ( "Unsupported CVSS vector: {}" , cvssVector ) ; result . setUnscoredSeverity ( Float . toString ( cvssScore ) ) ; } } else { log . debug ( "OSS has no vector for {}" , result . getName ( ) ) ; result . setUnscoredSeverity ( Float . toString ( cvssScore ) ) ; } // generate a reference to the vulnerability details on OSS Index
result . addReference ( REFERENCE_TYPE , source . getTitle ( ) , source . getReference ( ) . toString ( ) ) ; // attach vulnerable software details as best we can
PackageUrl purl = report . getCoordinates ( ) ; try { VulnerableSoftwareBuilder builder = new VulnerableSoftwareBuilder ( ) . part ( Part . APPLICATION ) . vendor ( purl . getNamespaceAsString ( ) ) . product ( purl . getName ( ) ) . version ( purl . getVersion ( ) ) ; // TODO : consider if we want / need to extract version - ranges to apply to vulnerable - software ?
VulnerableSoftware software = builder . build ( ) ; result . addVulnerableSoftware ( software ) ; result . setMatchedVulnerableSoftware ( software ) ; } catch ( CpeValidationException e ) { log . warn ( "Unable to construct vulnerable-software for: {}" , purl , e ) ; } return result ;
|
public class AbstractMutableMapBuilder { /** * Post a delta consisting of a single update .
* @ param key
* key of entry to update .
* @ param mutate
* mutation specification ; a function applied to the current value to produce the new one . */
protected final void doUpdates ( final K key , final Function < ? super V , ? extends V > mutate ) { } }
|
// Wraps the single (key, mutation) pair in an Update delta and posts it.
apply ( new Update < > ( key , mutate ) ) ;
|
public class SARLHighlightingCalculator { /** * Replies if the given call is for a capacity function call .
* @ param feature the feature to test .
* @ return { @ code true } if the feature is capacity ( s method . */
protected boolean isCapacityMethodCall ( JvmOperation feature ) { } }
|
if ( feature != null ) { final JvmDeclaredType container = feature . getDeclaringType ( ) ; if ( container instanceof JvmGenericType ) { return this . inheritanceHelper . isSarlCapacity ( ( JvmGenericType ) container ) ; } } return false ;
|
public class MisoScenePanel { /** * Dirties the specified indicator . */
protected void dirtyIndicator ( SceneObjectIndicator indic ) { } }
|
if ( indic != null ) { Rectangle r = indic . getBounds ( ) ; if ( r != null ) { _remgr . invalidateRegion ( r ) ; } }
|
public class InternalSARLParser { /** * InternalSARL . g : 8980:1 : ruleXUnaryOperation returns [ EObject current = null ] : ( ( ( ) ( ( ruleOpUnary ) ) ( ( lv _ operand _ 2_0 = ruleXUnaryOperation ) ) ) | this _ XPostfixOperation _ 3 = ruleXPostfixOperation ) ; */
public final EObject ruleXUnaryOperation ( ) throws RecognitionException { } }
|
// NOTE (review): ANTLR-generated parser code for the XUnaryOperation grammar
// rule — either a unary operator applied to a nested unary operation (alt 1),
// or a plain postfix operation (alt 2), chosen by lookahead token ranges.
// Do not hand-edit; regenerate from InternalSARL.g instead.
EObject current = null ; EObject lv_operand_2_0 = null ; EObject this_XPostfixOperation_3 = null ; enterRule ( ) ; try { // InternalSARL . g : 8986:2 : ( ( ( ( ) ( ( ruleOpUnary ) ) ( ( lv _ operand _ 2_0 = ruleXUnaryOperation ) ) ) | this _ XPostfixOperation _ 3 = ruleXPostfixOperation ) )
// InternalSARL . g : 8987:2 : ( ( ( ) ( ( ruleOpUnary ) ) ( ( lv _ operand _ 2_0 = ruleXUnaryOperation ) ) ) | this _ XPostfixOperation _ 3 = ruleXPostfixOperation )
{ // InternalSARL . g : 8987:2 : ( ( ( ) ( ( ruleOpUnary ) ) ( ( lv _ operand _ 2_0 = ruleXUnaryOperation ) ) ) | this _ XPostfixOperation _ 3 = ruleXPostfixOperation )
int alt238 = 2 ; int LA238_0 = input . LA ( 1 ) ; if ( ( ( LA238_0 >= 73 && LA238_0 <= 75 ) ) ) { alt238 = 1 ; } else if ( ( ( LA238_0 >= RULE_STRING && LA238_0 <= RULE_RICH_TEXT_START ) || ( LA238_0 >= RULE_HEX && LA238_0 <= RULE_DECIMAL ) || LA238_0 == 25 || ( LA238_0 >= 28 && LA238_0 <= 29 ) || LA238_0 == 36 || ( LA238_0 >= 39 && LA238_0 <= 40 ) || ( LA238_0 >= 42 && LA238_0 <= 45 ) || ( LA238_0 >= 48 && LA238_0 <= 49 ) || LA238_0 == 51 || LA238_0 == 55 || ( LA238_0 >= 60 && LA238_0 <= 63 ) || ( LA238_0 >= 67 && LA238_0 <= 68 ) || ( LA238_0 >= 78 && LA238_0 <= 96 ) || LA238_0 == 106 || LA238_0 == 129 || ( LA238_0 >= 131 && LA238_0 <= 140 ) ) ) { alt238 = 2 ; } else { if ( state . backtracking > 0 ) { state . failed = true ; return current ; } NoViableAltException nvae = new NoViableAltException ( "" , 238 , 0 , input ) ; throw nvae ; } switch ( alt238 ) { case 1 : // InternalSARL . g : 8988:3 : ( ( ) ( ( ruleOpUnary ) ) ( ( lv _ operand _ 2_0 = ruleXUnaryOperation ) ) )
{ // InternalSARL . g : 8988:3 : ( ( ) ( ( ruleOpUnary ) ) ( ( lv _ operand _ 2_0 = ruleXUnaryOperation ) ) )
// InternalSARL . g : 8989:4 : ( ) ( ( ruleOpUnary ) ) ( ( lv _ operand _ 2_0 = ruleXUnaryOperation ) )
{ // InternalSARL . g : 8989:4 : ( )
// InternalSARL . g : 8990:5:
{ if ( state . backtracking == 0 ) { current = forceCreateModelElement ( grammarAccess . getXUnaryOperationAccess ( ) . getXUnaryOperationAction_0_0 ( ) , current ) ; } } // InternalSARL . g : 8996:4 : ( ( ruleOpUnary ) )
// InternalSARL . g : 8997:5 : ( ruleOpUnary )
{ // InternalSARL . g : 8997:5 : ( ruleOpUnary )
// InternalSARL . g : 8998:6 : ruleOpUnary
{ if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElement ( grammarAccess . getXUnaryOperationRule ( ) ) ; } } if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXUnaryOperationAccess ( ) . getFeatureJvmIdentifiableElementCrossReference_0_1_0 ( ) ) ; } pushFollow ( FOLLOW_45 ) ; ruleOpUnary ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { afterParserOrEnumRuleCall ( ) ; } } } // InternalSARL . g : 9012:4 : ( ( lv _ operand _ 2_0 = ruleXUnaryOperation ) )
// InternalSARL . g : 9013:5 : ( lv _ operand _ 2_0 = ruleXUnaryOperation )
{ // InternalSARL . g : 9013:5 : ( lv _ operand _ 2_0 = ruleXUnaryOperation )
// InternalSARL . g : 9014:6 : lv _ operand _ 2_0 = ruleXUnaryOperation
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXUnaryOperationAccess ( ) . getOperandXUnaryOperationParserRuleCall_0_2_0 ( ) ) ; } pushFollow ( FOLLOW_2 ) ; lv_operand_2_0 = ruleXUnaryOperation ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXUnaryOperationRule ( ) ) ; } set ( current , "operand" , lv_operand_2_0 , "io.sarl.lang.SARL.XUnaryOperation" ) ; afterParserOrEnumRuleCall ( ) ; } } } } } break ; case 2 : // InternalSARL . g : 9033:3 : this _ XPostfixOperation _ 3 = ruleXPostfixOperation
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXUnaryOperationAccess ( ) . getXPostfixOperationParserRuleCall_1 ( ) ) ; } pushFollow ( FOLLOW_2 ) ; this_XPostfixOperation_3 = ruleXPostfixOperation ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { current = this_XPostfixOperation_3 ; afterParserOrEnumRuleCall ( ) ; } } break ; } } if ( state . backtracking == 0 ) { leaveRule ( ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
|
public class KiteTicker { /** * Creates url for websocket connection . */
private void createUrl ( String accessToken , String apiKey ) { } }
|
wsuri = new Routes ( ) . getWsuri ( ) . replace ( ":access_token" , accessToken ) . replace ( ":api_key" , apiKey ) ;
|
public class BusinessProcess { /** * Signals the current execution , see { @ link RuntimeService # signal ( String ) }
* Ends the current unit of work ( flushes changes to process variables set
* using { @ link # setVariable ( String , Object ) } or made on
* { @ link BusinessProcessScoped @ BusinessProcessScoped } beans ) .
* @ throws ProcessEngineCdiException
* if no execution is currently associated
* @ throws ProcessEngineException
* if the activiti command fails */
public void signalExecution ( ) { } }
|
// Ordering matters: (1) require an associated execution; (2) flush the cached
// local variables to the execution; (3) signal it, passing the cached
// (non-local) variables; (4) drop the association to close this unit of work.
assertExecutionAssociated ( ) ; processEngine . getRuntimeService ( ) . setVariablesLocal ( associationManager . getExecutionId ( ) , getAndClearCachedLocalVariableMap ( ) ) ; processEngine . getRuntimeService ( ) . signal ( associationManager . getExecutionId ( ) , getAndClearCachedVariableMap ( ) ) ; associationManager . disAssociate ( ) ;
|
public class RibbonLoadBalancerClient { /** * New : Select a server using a ' key ' .
* @ param serviceId of the service to choose an instance for
* @ param hint to specify the service instance
* @ return the selected { @ link ServiceInstance } */
public ServiceInstance choose ( String serviceId , Object hint ) { } }
|
Server server = getServer ( getLoadBalancer ( serviceId ) , hint ) ; if ( server == null ) { return null ; } return new RibbonServer ( serviceId , server , isSecure ( server , serviceId ) , serverIntrospector ( serviceId ) . getMetadata ( server ) ) ;
|
public class Destination { /** * An array that contains the email addresses of the " BCC " ( blind carbon copy ) recipients for the email .
* @ param bccAddresses
* An array that contains the email addresses of the " BCC " ( blind carbon copy ) recipients for the email . */
public void setBccAddresses ( java . util . Collection < String > bccAddresses ) { } }
|
if ( bccAddresses == null ) { this . bccAddresses = null ; return ; } this . bccAddresses = new java . util . ArrayList < String > ( bccAddresses ) ;
|
public class ParamUtil {
    /**
     * Converts a parameter map into a URL query string ("k1=v1&amp;k2=v2").
     * Values are URL-encoded as UTF-8; keys are appended as-is, preserving
     * the map's iteration order.
     *
     * @param data the parameter map; may be null or empty
     * @return the encoded query string, or {@code null} when {@code data} is
     *     null or empty
     * @throws UnsupportedEncodingException declared for compatibility; never
     *     thrown in practice since UTF-8 is always supported
     */
    public static String getUrlParamsByMap(Map<String, String> data) throws UnsupportedEncodingException {
        if (data == null || data.isEmpty()) {
            return null;
        }
        final StringBuilder query = new StringBuilder();
        for (Map.Entry<String, String> entry : data.entrySet()) {
            // Join with '&' between pairs instead of appending a trailing
            // separator and stripping it afterwards.
            if (query.length() > 0) {
                query.append('&');
            }
            // FIX: use the standard UTF-8 charset name; the original passed a
            // non-standard Charset.UTF_8 constant to URLEncoder.encode.
            query.append(entry.getKey())
                    .append('=')
                    .append(URLEncoder.encode(entry.getValue(), java.nio.charset.StandardCharsets.UTF_8.name()));
        }
        return query.toString();
    }
}
|
public class SegmentAggregator { /** * Processes an UpdateAttributesOperation .
* @ param operation The Operation to process . */
private void addUpdateAttributesOperation ( UpdateAttributesOperation operation ) { } }
|
// Extended attributes are folded into the aggregated append pinned at the
// current storage length, so they travel together with the pending data.
if ( ! this . metadata . isSealedInStorage ( ) ) { // Only process the operation if the Segment is not sealed in Storage . If it is , then so is the Attribute Index ,
// and it means this operation has already been applied to the index .
Map < UUID , Long > attributes = getExtendedAttributes ( operation ) ; if ( ! attributes . isEmpty ( ) ) { AggregatedAppendOperation aggregatedAppend = getOrCreateAggregatedAppend ( this . metadata . getStorageLength ( ) , operation . getSequenceNumber ( ) ) ; aggregatedAppend . includeAttributes ( attributes ) ; } }
|
public class Persistence { /** * Gets the value of the persistenceUnit property .
* This accessor method returns a reference to the live list ,
* not a snapshot . Therefore any modification you make to the
* returned list will be present inside the JAXB object .
* This is why there is not a < CODE > set < / CODE > method for the persistenceUnit property .
* For example , to add a new item , do as follows :
* < pre >
* getPersistenceUnit ( ) . add ( newItem ) ;
* < / pre >
* Objects of the following type ( s ) are allowed in the list
* { @ link Persistence . PersistenceUnit } */
public List < Persistence . PersistenceUnit > getPersistenceUnit ( ) { } }
|
if ( persistenceUnit == null ) { persistenceUnit = new ArrayList < Persistence . PersistenceUnit > ( ) ; } return this . persistenceUnit ;
|
public class JwkProviderBuilder { /** * Enable the cache specifying size and expire time .
* @ param bucketSize max number of jwks to deliver in the given rate .
* @ param refillRate amount of time to wait before a jwk can the jwk will be cached
* @ param unit unit of time for the expire of jwk
* @ return the builder */
public JwkProviderBuilder rateLimited ( long bucketSize , long refillRate , TimeUnit unit ) { } }
|
bucket = new BucketImpl ( bucketSize , refillRate , unit ) ; return this ;
|
public class RxApollo { /** * Converts an { @ link ApolloQueryWatcher } into an Observable .
* @ param watcher the ApolloQueryWatcher to convert
* @ param backpressureMode the back pressure strategy to apply to the observable source .
* @ param < T > the value type
* @ return the converted Observable */
@ NotNull public static < T > Observable < Response < T > > from ( @ NotNull final ApolloQueryWatcher < T > watcher , @ NotNull Emitter . BackpressureMode backpressureMode ) { } }
|
// Bridges the watcher callback API to an Observable: unsubscribing cancels the
// watcher, and the canceled flag suppresses emissions (onNext/onError) that may
// still arrive after cancellation. Fatal failures are rethrown immediately.
checkNotNull ( backpressureMode , "backpressureMode == null" ) ; checkNotNull ( watcher , "watcher == null" ) ; return Observable . create ( new Action1 < Emitter < Response < T > > > ( ) { @ Override public void call ( final Emitter < Response < T > > emitter ) { final AtomicBoolean canceled = new AtomicBoolean ( ) ; emitter . setCancellation ( new Cancellable ( ) { @ Override public void cancel ( ) throws Exception { canceled . set ( true ) ; watcher . cancel ( ) ; } } ) ; watcher . enqueueAndWatch ( new ApolloCall . Callback < T > ( ) { @ Override public void onResponse ( @ NotNull Response < T > response ) { if ( ! canceled . get ( ) ) { emitter . onNext ( response ) ; } } @ Override public void onFailure ( @ NotNull ApolloException e ) { Exceptions . throwIfFatal ( e ) ; if ( ! canceled . get ( ) ) { emitter . onError ( e ) ; } } } ) ; } } , backpressureMode ) ;
|
public class FeatureTokens { /** * indexed setter for beginnings - sets an indexed value -
* @ generated
* @ param i index in the array to set
* @ param v value to set into the array */
public void setBeginnings ( int i , int v ) { } }
|
// Generated UIMA accessor: verifies the "beginnings" feature exists, range-checks
// the index against the underlying int array, then writes the value into it.
if ( FeatureTokens_Type . featOkTst && ( ( FeatureTokens_Type ) jcasType ) . casFeat_beginnings == null ) jcasType . jcas . throwFeatMissing ( "beginnings" , "ch.epfl.bbp.uima.types.FeatureTokens" ) ; jcasType . jcas . checkArrayBounds ( jcasType . ll_cas . ll_getRefValue ( addr , ( ( FeatureTokens_Type ) jcasType ) . casFeatCode_beginnings ) , i ) ; jcasType . ll_cas . ll_setIntArrayValue ( jcasType . ll_cas . ll_getRefValue ( addr , ( ( FeatureTokens_Type ) jcasType ) . casFeatCode_beginnings ) , i , v ) ;
|
public class FileFetcher { /** * Downloads an object . It retries downloading { @ link # maxFetchRetry }
* times and throws an exception .
* @ param object an object to be downloaded
* @ param outFile a file which the object data is stored
* @ return number of downloaded bytes */
@ Override protected long download ( T object , File outFile ) throws IOException { } }
|
// Streams the object into outFile with up to maxFetchRetry + 1 total attempts;
// outFile::delete is the cleanup run between retries, and any terminal failure
// is rethrown wrapped in an IOException. Both streams are closed per attempt
// via try-with-resources.
try { return RetryUtils . retry ( ( ) -> { try ( final InputStream is = openObjectFunction . open ( object ) ; final OutputStream os = new FileOutputStream ( outFile ) ) { return IOUtils . copyLarge ( is , os , buffer ) ; } } , retryCondition , outFile :: delete , maxFetchRetry + 1 , StringUtils . format ( "Failed to download object[%s]" , object ) ) ; } catch ( Exception e ) { throw new IOException ( e ) ; }
|
public class ChronoFormatter { /** * also called in PatternType */
static boolean hasUnixChronology ( Chronology < ? > chronology ) { } }
|
Chronology < ? > c = chronology ; do { if ( UnixTime . class . isAssignableFrom ( c . getChronoType ( ) ) ) { return true ; } } while ( ( c = c . preparser ( ) ) != null ) ; return false ;
|
public class ProjectionOperationFactory { /** * Ensures that all expressions have a derivable name . There is a few categories and naming
* rules that apply :
* < ul >
* < li > { @ link FieldReferenceExpression } , { @ link TableReferenceExpression } ,
* { @ link LocalReferenceExpression } and { @ link BuiltInFunctionDefinitions # AS }
* are already named } < / li >
* < li > { @ link BuiltInFunctionDefinitions # CAST } use the name of underlying expression
* appended with the name of the type < / li >
* < li > { @ link BuiltInFunctionDefinitions # GET } uses pattern
* < i > [ underlying name ] [ $ fieldName ] { 1 , } < / i > < / li >
* < li > if none of the above apply a name < i > [ _ c ( idx ) ] < / i > is used where idx is
* the index within given expressions < / li >
* < / ul > */
private List < Expression > nameExpressions ( List < Expression > expression ) { } }
|
return IntStream . range ( 0 , expression . size ( ) ) . mapToObj ( idx -> { currentFieldIndex = idx ; return expression . get ( idx ) . accept ( namingVisitor ) ; } ) . collect ( Collectors . toList ( ) ) ;
|
public class Constraints { /** * Get a { @ link Predicate } for testing entity objects that match the given
* { @ link StorageKey } .
* @ param < E > The type of entities to be matched
* @ param key a StorageKey for entities tested with the Predicate
* @ return a Predicate to test if entity objects satisfy this constraint set */
public < E > Predicate < E > toEntityPredicate ( StorageKey key , EntityAccessor < E > accessor ) { } }
|
if ( key != null ) { Map < String , Predicate > predicates = minimizeFor ( key ) ; if ( predicates . isEmpty ( ) ) { return alwaysTrue ( ) ; } return entityPredicate ( predicates , schema , accessor , strategy ) ; } return toEntityPredicate ( accessor ) ;
|
public class policyhttpcallout { /** * Use this API to fetch policyhttpcallout resource of given name . */
public static policyhttpcallout get ( nitro_service service , String name ) throws Exception { } }
|
policyhttpcallout obj = new policyhttpcallout ( ) ; obj . set_name ( name ) ; policyhttpcallout response = ( policyhttpcallout ) obj . get_resource ( service ) ; return response ;
|
public class PermilOperator { /** * Gets the permil of the amount .
* This returns the monetary amount in permil . For example , for 10 % ' EUR
* 2.35 ' will return 0.235.
* This is returned as a { @ code MonetaryAmount } .
* @ return the permil result of the amount , never { @ code null } */
@ Override public MonetaryAmount apply ( MonetaryAmount amount ) { } }
|
Objects . requireNonNull ( amount , "Amount required." ) ; return amount . multiply ( permilValue ) ;
|
public class YarnJobSubmissionClient { /** * We leave a file behind in job submission directory so that clr client can figure out
* the applicationId and yarn rest endpoint .
* @ param driverFolder
* @ param applicationId
* @ throws IOException */
private void writeDriverHttpEndPoint ( final File driverFolder , final String applicationId , final Path dfsPath ) throws IOException { } }
|
final FileSystem fs = FileSystem . get ( yarnConfiguration ) ; final Path httpEndpointPath = new Path ( dfsPath , fileNames . getDriverHttpEndpoint ( ) ) ; String trackingUri = null ; LOG . log ( Level . INFO , "Attempt to reading " + httpEndpointPath . toString ( ) ) ; for ( int i = 0 ; i < 60 ; i ++ ) { try { LOG . log ( Level . FINE , "Attempt " + i + " reading " + httpEndpointPath . toString ( ) ) ; if ( fs . exists ( httpEndpointPath ) ) { final FSDataInputStream input = fs . open ( httpEndpointPath ) ; final BufferedReader reader = new BufferedReader ( new InputStreamReader ( input , StandardCharsets . UTF_8 ) ) ; trackingUri = reader . readLine ( ) ; reader . close ( ) ; break ; } } catch ( Exception ignored ) { // readLine might throw IOException although httpEndpointPath file exists .
// the for - loop waits until the actual content of file is written completely
} try { Thread . sleep ( 1000 ) ; } catch ( InterruptedException ex2 ) { break ; } } if ( null == trackingUri ) { trackingUri = "" ; LOG . log ( Level . WARNING , "Failed reading " + httpEndpointPath . toString ( ) ) ; } else { LOG . log ( Level . INFO , "Completed reading trackingUri :" + trackingUri ) ; } final File driverHttpEndpointFile = new File ( driverFolder , fileNames . getDriverHttpEndpoint ( ) ) ; BufferedWriter out = new BufferedWriter ( new OutputStreamWriter ( new FileOutputStream ( driverHttpEndpointFile ) , StandardCharsets . UTF_8 ) ) ; out . write ( applicationId + "\n" ) ; out . write ( trackingUri + "\n" ) ; String addr = yarnConfiguration . get ( "yarn.resourcemanager.webapp.address" ) ; if ( null == addr || addr . startsWith ( "0.0.0.0" ) ) { String str2 = yarnConfiguration . get ( "yarn.resourcemanager.ha.rm-ids" ) ; if ( null != str2 ) { for ( String rm : str2 . split ( "," ) ) { out . write ( yarnConfiguration . get ( "yarn.resourcemanager.webapp.address." + rm ) + "\n" ) ; } } } else { out . write ( addr + "\n" ) ; } out . close ( ) ;
|
public class NetworkInterfaceTapConfigurationsInner { /** * Deletes the specified tap configuration from the NetworkInterface .
* @ param resourceGroupName The name of the resource group .
* @ param networkInterfaceName The name of the network interface .
* @ param tapConfigurationName The name of the tap configuration .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */
public void beginDelete ( String resourceGroupName , String networkInterfaceName , String tapConfigurationName ) { } }
|
// Synchronous wrapper: runs the async begin-delete and blocks for its single response.
beginDeleteWithServiceResponseAsync ( resourceGroupName , networkInterfaceName , tapConfigurationName ) . toBlocking ( ) . single ( ) . body ( ) ;
|
public class JUnitInContainerLaunchShortcut {
    /*
     * (non-Javadoc)
     * @see org.eclipse.jdt.junit.launcher.JUnitLaunchShortcut#createLaunchConfiguration(org.eclipse.jdt.core.IJavaElement)
     */
    @Override
    protected ILaunchConfigurationWorkingCopy createLaunchConfiguration(IJavaElement element) throws CoreException {
        ILaunchConfigurationWorkingCopy wc = super.createLaunchConfiguration(element);
        // Propagate the container URL from the plugin preference store to the
        // launched JVM as a -D system property.
        String containerUrl = JicUnitPlugin.getDefault().getPreferenceStore().getString(CONTAINER_URL);
        String extraVwArg = "-D" + CONTAINER_URL + "=" + containerUrl;
        // NOTE(review): this replaces any VM arguments the superclass may have
        // set rather than appending to them -- confirm that is intended.
        wc.setAttribute(IJavaLaunchConfigurationConstants.ATTR_VM_ARGUMENTS, extraVwArg);
        return wc;
    }
}
|
public class DockerRule {
    /**
     * Return {@link CLI} for specified jenkins container ID.
     * @param containerId the docker container id to inspect
     * @return a CLI bound to the inspected container
     * @throws IOException if the docker call or CLI creation fails
     * @throws InterruptedException if interrupted while creating the CLI
     */
    public DockerCLI createCliForContainer(String containerId) throws IOException, InterruptedException {
        LOG.trace("Creating cli for container {}.", containerId);
        // Inspect first so the CLI can be built from the container's runtime metadata.
        final InspectContainerResponse inspect = getDockerCli().inspectContainerCmd(containerId).exec();
        return createCliForInspect(inspect);
    }
}
|
public class BeanExtractor {
    /**
     * {@inheritDoc}
     * Extracts either a nested bean property (when path parts remain) or the
     * value itself, optionally converted to its JSON representation.
     */
    @SuppressWarnings("PMD.CompareObjectsWithEquals")
    public Object extractObject(ObjectToJsonConverter pConverter, Object pValue, Stack<String> pPathParts, boolean jsonify) throws AttributeNotFoundException {
        // Wrap fault handler if a wildcard path pattern is present
        ValueFaultHandler faultHandler = pConverter.getValueFaultHandler();
        // Consume the next path element, if any.
        String pathPart = pPathParts.isEmpty() ? null : pPathParts.pop();
        if (pathPart != null) {
            // Still some path elements available, so dive deeper
            Object attributeValue = extractBeanPropertyValue(pValue, pathPart, faultHandler);
            return pConverter.extractObject(attributeValue, pPathParts, jsonify);
        } else {
            if (jsonify) {
                // We need the jsonified value from here on.
                return exctractJsonifiedValue(pConverter, pValue, pPathParts);
            } else {
                // No jsonification requested, hence we are returning the object itself
                return pValue;
            }
        }
    }
}
|
public class AbstractBigtableTable {
    /**
     * {@inheritDoc}
     * Null or empty input yields an empty result array; a single Get avoids
     * batch overhead; multiple Gets run as one traced batch.
     */
    @Override
    public Result[] get(List<Get> gets) throws IOException {
        LOG.trace("get(List<>)");
        if (gets == null || gets.isEmpty()) {
            // Nothing requested: empty array rather than null.
            return new Result[0];
        } else if (gets.size() == 1) {
            // Single element: delegate to the single-Get overload.
            try {
                return new Result[] { get(gets.get(0)) };
            } catch (IOException e) {
                throw createRetriesExhaustedWithDetailsException(e, gets.get(0));
            }
        } else {
            // Batch path, wrapped in a tracing span annotated with the batch size.
            try (Scope scope = TRACER.spanBuilder("BigtableTable.get").startScopedSpan()) {
                addBatchSizeAnnotation(gets);
                return getBatchExecutor().batch(gets);
            }
        }
    }
}
|
public class DfuServiceInitiator {
    /**
     * Starts the DFU service.
     * Copies all configured options into an Intent, starts the given service
     * (as a foreground service on Android O+ when requested) and returns a
     * controller for the started operation.
     * @param context the application context
     * @param service the class derived from the BaseDfuService
     * @return a controller for the started DFU service
     * @throws UnsupportedOperationException if no firmware file was specified
     */
    public DfuServiceController start(@NonNull final Context context, @NonNull final Class<? extends DfuBaseService> service) {
        if (fileType == -1)
            throw new UnsupportedOperationException("You must specify the firmware file before starting the service");
        // Pack every builder field into Intent extras for the service to read.
        final Intent intent = new Intent(context, service);
        intent.putExtra(DfuBaseService.EXTRA_DEVICE_ADDRESS, deviceAddress);
        intent.putExtra(DfuBaseService.EXTRA_DEVICE_NAME, deviceName);
        intent.putExtra(DfuBaseService.EXTRA_DISABLE_NOTIFICATION, disableNotification);
        intent.putExtra(DfuBaseService.EXTRA_FOREGROUND_SERVICE, startAsForegroundService);
        intent.putExtra(DfuBaseService.EXTRA_FILE_MIME_TYPE, mimeType);
        intent.putExtra(DfuBaseService.EXTRA_FILE_TYPE, fileType);
        intent.putExtra(DfuBaseService.EXTRA_FILE_URI, fileUri);
        intent.putExtra(DfuBaseService.EXTRA_FILE_PATH, filePath);
        intent.putExtra(DfuBaseService.EXTRA_FILE_RES_ID, fileResId);
        intent.putExtra(DfuBaseService.EXTRA_INIT_FILE_URI, initFileUri);
        intent.putExtra(DfuBaseService.EXTRA_INIT_FILE_PATH, initFilePath);
        intent.putExtra(DfuBaseService.EXTRA_INIT_FILE_RES_ID, initFileResId);
        intent.putExtra(DfuBaseService.EXTRA_KEEP_BOND, keepBond);
        intent.putExtra(DfuBaseService.EXTRA_RESTORE_BOND, restoreBond);
        intent.putExtra(DfuBaseService.EXTRA_FORCE_DFU, forceDfu);
        intent.putExtra(DfuBaseService.EXTRA_DISABLE_RESUME, disableResume);
        intent.putExtra(DfuBaseService.EXTRA_MAX_DFU_ATTEMPTS, numberOfRetries);
        intent.putExtra(DfuBaseService.EXTRA_MBR_SIZE, mbrSize);
        // MTU is only forwarded when explicitly set to a positive value.
        if (mtu > 0)
            intent.putExtra(DfuBaseService.EXTRA_MTU, mtu);
        intent.putExtra(DfuBaseService.EXTRA_CURRENT_MTU, currentMtu);
        intent.putExtra(DfuBaseService.EXTRA_UNSAFE_EXPERIMENTAL_BUTTONLESS_DFU, enableUnsafeExperimentalButtonlessDfu);
        //noinspection StatementWithEmptyBody
        if (packetReceiptNotificationsEnabled != null) {
            intent.putExtra(DfuBaseService.EXTRA_PACKET_RECEIPT_NOTIFICATIONS_ENABLED, packetReceiptNotificationsEnabled);
            intent.putExtra(DfuBaseService.EXTRA_PACKET_RECEIPT_NOTIFICATIONS_VALUE, numberOfPackets);
        } else {
            // For backwards compatibility:
            // If the setPacketsReceiptNotificationsEnabled(boolean) has not been called, the PRN state and value are taken from
            // SharedPreferences the way they were read in DFU Library in 1.0.3 and before, or set to default values.
            // Default values: PRNs enabled on Android 4.3-5.1 and disabled starting from Android 6.0. Default PRN value is 12.
        }
        // Custom UUID sets are only attached when configured.
        if (legacyDfuUuids != null)
            intent.putExtra(DfuBaseService.EXTRA_CUSTOM_UUIDS_FOR_LEGACY_DFU, legacyDfuUuids);
        if (secureDfuUuids != null)
            intent.putExtra(DfuBaseService.EXTRA_CUSTOM_UUIDS_FOR_SECURE_DFU, secureDfuUuids);
        if (experimentalButtonlessDfuUuids != null)
            intent.putExtra(DfuBaseService.EXTRA_CUSTOM_UUIDS_FOR_EXPERIMENTAL_BUTTONLESS_DFU, experimentalButtonlessDfuUuids);
        if (buttonlessDfuWithoutBondSharingUuids != null)
            intent.putExtra(DfuBaseService.EXTRA_CUSTOM_UUIDS_FOR_BUTTONLESS_DFU_WITHOUT_BOND_SHARING, buttonlessDfuWithoutBondSharingUuids);
        if (buttonlessDfuWithBondSharingUuids != null)
            intent.putExtra(DfuBaseService.EXTRA_CUSTOM_UUIDS_FOR_BUTTONLESS_DFU_WITH_BOND_SHARING, buttonlessDfuWithBondSharingUuids);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O && startAsForegroundService) {
            // On Android Oreo and above the service must be started as a foreground service to make it accessible from
            // a killed application.
            context.startForegroundService(intent);
        } else {
            context.startService(intent);
        }
        return new DfuServiceController(context);
    }
}
|
public class FeatureState { /** * Sets a new value for the given parameter . */
public FeatureState setParameter ( String name , String value ) { } }
|
if ( value != null ) { this . parameters . put ( name , value ) ; } else { this . parameters . remove ( name ) ; } return this ;
|
public class ArrayUtil { /** * Basic sort for small arrays of int . */
public static void sortArray ( int [ ] array ) { } }
|
boolean swapped ; do { swapped = false ; for ( int i = 0 ; i < array . length - 1 ; i ++ ) { if ( array [ i ] > array [ i + 1 ] ) { int temp = array [ i + 1 ] ; array [ i + 1 ] = array [ i ] ; array [ i ] = temp ; swapped = true ; } } } while ( swapped ) ;
|
public class ZipFileSubsystemInputStreamSources { /** * Adds all file sources in the specified zip file .
* @ param file
* @ throws IOException */
public void addAllSubsystemFileSourcesFromZipFile ( File file ) throws IOException { } }
|
try ( ZipFile zip = new ZipFile ( file ) ) { // extract subsystem template and schema , if present
if ( zip . getEntry ( "subsystem-templates" ) != null ) { Enumeration < ? extends ZipEntry > entries = zip . entries ( ) ; while ( entries . hasMoreElements ( ) ) { ZipEntry entry = entries . nextElement ( ) ; if ( ! entry . isDirectory ( ) ) { String entryName = entry . getName ( ) ; if ( entryName . startsWith ( "subsystem-templates/" ) ) { addSubsystemFileSource ( entryName . substring ( "subsystem-templates/" . length ( ) ) , file , entry ) ; } } } } }
|
public class JpaSource {
    /**
     * Return an iterator that iterates over this source.
     * @return a source iterator
     * @see JpaIterator
     */
    public Iterator iterator() {
        // Build a "read all" query for the configured entity class and wrap it
        // in a JpaIterator that fetches results in batches of m_batchSize.
        Query readAll = m_em.createQuery(createReadAllQuery(m_entityClass.getSimpleName()));
        return new JpaIterator(readAll, m_batchSize);
    }
}
|
public class NmeaStreamProcessor { /** * Returns the index of the earliest timestamp ( PGHP ) line . If none found
* returns null .
* @ param lines
* @ return */
private static Integer getEarliestTimestampLineIndex ( List < LineAndTime > lines ) { } }
|
Integer i = 0 ; for ( LineAndTime line : lines ) { if ( isExactEarthTimestamp ( line . getLine ( ) ) ) return i ; else i ++ ; } return null ;
|
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link SurfacePropertyType}{@code >}
     * @param value
     *     Java instance representing xml element's value.
     * @return
     *     the new instance of {@link JAXBElement}{@code <}{@link SurfacePropertyType}{@code >}
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/gml", name = "baseSurface")
    public JAXBElement<SurfacePropertyType> createBaseSurface(SurfacePropertyType value) {
        // Scope argument is null, marking this as a global (non-nested) element.
        return new JAXBElement<SurfacePropertyType>(_BaseSurface_QNAME, SurfacePropertyType.class, null, value);
    }
}
|
public class UpdateRecoveryPointLifecycleRequestMarshaller {
    /**
     * Marshall the given parameter object.
     * @param updateRecoveryPointLifecycleRequest the request to marshall; must not be null
     * @param protocolMarshaller receives each member bound to its protocol location
     * @throws SdkClientException if the request is null or any member fails to marshall
     */
    public void marshall(UpdateRecoveryPointLifecycleRequest updateRecoveryPointLifecycleRequest, ProtocolMarshaller protocolMarshaller) {
        if (updateRecoveryPointLifecycleRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind each request member to its pre-declared marshalling location.
            protocolMarshaller.marshall(updateRecoveryPointLifecycleRequest.getBackupVaultName(), BACKUPVAULTNAME_BINDING);
            protocolMarshaller.marshall(updateRecoveryPointLifecycleRequest.getRecoveryPointArn(), RECOVERYPOINTARN_BINDING);
            protocolMarshaller.marshall(updateRecoveryPointLifecycleRequest.getLifecycle(), LIFECYCLE_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
|
public class AllRowsReader {
    /**
     * Wait for all tasks to finish.
     * Collects the messages and stack traces of every failed task and re-throws
     * them as a single combined Exception.
     * @return true if all tasks returned true or false otherwise.
     * @throws Exception if any task threw; carries all failures' messages and traces
     */
    private boolean waitForTasksToFinish() throws Exception {
        Boolean succeeded = true;
        // Tracking state for multiple exceptions, if any
        List<StackTraceElement> stackTraces = new ArrayList<StackTraceElement>();
        StringBuilder sb = new StringBuilder();
        int exCount = 0;
        for (Future<Boolean> future : futures) {
            try {
                if (!future.get()) {
                    // A task reported failure: cancel the remaining work.
                    cancel();
                    succeeded = false;
                }
            } catch (Exception e) {
                // Record only the first exception in 'error', but accumulate every
                // failure's message and stack trace for the combined exception below.
                error.compareAndSet(null, e);
                cancel();
                succeeded = false;
                exCount++;
                sb.append("ex" + exCount + ": ").append(e.getMessage()).append("\n");
                StackTraceElement[] stackTrace = e.getStackTrace();
                if (stackTrace != null && stackTrace.length > 0) {
                    // Synthetic delimiter frame separating the individual traces.
                    StackTraceElement delimiterSE = new StackTraceElement("StackTrace: ex" + exCount, "", "", 0);
                    stackTraces.add(delimiterSE);
                    for (StackTraceElement se : stackTrace) {
                        stackTraces.add(se);
                    }
                }
            }
        }
        if (this.rowFunction instanceof Flushable) {
            // Give the row consumer a chance to flush buffered output.
            ((Flushable) rowFunction).flush();
        }
        if (exCount > 0) {
            // Re-throw one exception carrying all collected failures.
            String exMessage = sb.toString();
            StackTraceElement[] seArray = stackTraces.toArray(new StackTraceElement[stackTraces.size()]);
            Exception ex = new Exception(exMessage);
            ex.setStackTrace(seArray);
            throw ex;
        }
        return succeeded;
    }
}
|
public class PasswordHash { /** * Returns a salted PBKDF2 hash of the password .
* @ param password
* the password to hash
* @ return a salted PBKDF2 hash of the password
* @ throws NoSuchAlgorithmException if jdk does not support the algorithm
* @ throws InvalidKeySpecException if the password or salt are invalid */
public static String createHash ( char [ ] password ) throws NoSuchAlgorithmException , InvalidKeySpecException { } }
|
// Generate a random salt
SecureRandom random = new SecureRandom ( ) ; byte [ ] salt = new byte [ SALT_BYTE_SIZE ] ; random . nextBytes ( salt ) ; // Hash the password
byte [ ] hash = pbkdf2 ( password , salt , PBKDF2_ITERATIONS , HASH_BYTE_SIZE ) ; // format iterations : salt : hash
return PBKDF2_ITERATIONS + ":" + toHex ( salt ) + ":" + toHex ( hash ) ;
|
public class CMM_GTAnalysis {
    /**
     * Calculates the relative number of errors being caused by the underlying cluster model
     * @return quality of the model
     */
    public double getModelQuality() {
        // Count points that a ground-truth cluster of a different class fully
        // includes (inclusion probability >= 1); tally noise and non-noise separately.
        for (int p = 0; p < numPoints; p++) {
            CMMPoint cmdp = cmmpoints.get(p);
            for (int hc = 0; hc < numGTClusters; hc++) {
                if (gtClustering.get(hc).getGroundTruth() != cmdp.trueClass) {
                    if (gtClustering.get(hc).getInclusionProbability(cmdp) >= 1) {
                        if (!cmdp.isNoise())
                            pointErrorByModel++;
                        else
                            noiseErrorByModel++;
                        // Count each point at most once, even if several clusters misinclude it.
                        break;
                    }
                }
            }
        }
        if (debug)
            System.out.println("Error by model: noise " + noiseErrorByModel + " point " + pointErrorByModel);
        // NOTE(review): pointErrorByModel/noiseErrorByModel look like fields and are
        // not reset here, so a second call would double-count -- confirm intended.
        return 1 - ((pointErrorByModel + noiseErrorByModel) / (double) numPoints);
    }
}
|
public class AjaxWPaginationExample { /** * Override preparePaintComponent in order to set up the example data the first time through .
* @ param request the request being responded to . */
@ Override protected void preparePaintComponent ( final Request request ) { } }
|
super . preparePaintComponent ( request ) ; if ( ! isInitialised ( ) ) { List < Serializable > items = new ArrayList < > ( ) ; items . add ( new SimpleTableBean ( "A" , "none" , "thing" ) ) ; items . add ( new SimpleTableBean ( "B" , "some" , "thing2" ) ) ; items . add ( new SimpleTableBean ( "C" , "little" , "thing3" ) ) ; items . add ( new SimpleTableBean ( "D" , "lots" , "thing4" ) ) ; items . add ( new SimpleTableBean ( "E" , "none" , "thing5" ) ) ; items . add ( new SimpleTableBean ( "F" , "some" , "thing6" ) ) ; items . add ( new SimpleTableBean ( "G" , "little" , "thing7" ) ) ; items . add ( new SimpleTableBean ( "H" , "lots" , "thing8" ) ) ; items . add ( new SimpleTableBean ( "I" , "none" , "thing9" ) ) ; items . add ( new SimpleTableBean ( "J" , "some" , "thing10" ) ) ; items . add ( new SimpleTableBean ( "K" , "little" , "thing11" ) ) ; items . add ( new SimpleTableBean ( "L" , "lots" , "thing12" ) ) ; table . setDataModel ( new SimpleBeanListTableDataModel ( new String [ ] { "name" , "type" , "thing" } , items ) ) ; setInitialised ( true ) ; }
|
public class FormalParameterBuilderImpl {
    /**
     * Replies the JvmIdentifiable that corresponds to the formal parameter.
     * @param container the feature call that is supposed to contains the replied identifiable element.
     */
    public void setReferenceInto(XFeatureCall container) {
        // Create a placeholder JvmVoid; when it supports proxying, point it at
        // the derived SarlFormalParameter so it resolves lazily.
        JvmVoid jvmVoid = this.jvmTypesFactory.createJvmVoid();
        if (jvmVoid instanceof InternalEObject) {
            final InternalEObject jvmVoidProxy = (InternalEObject) jvmVoid;
            final EObject param = getSarlFormalParameter();
            final Resource resource = param.eResource();
            // Get the derived object
            final SarlFormalParameter jvmParam = getAssociatedElement(SarlFormalParameter.class, param, resource);
            // Set the proxy URI
            final URI uri = EcoreUtil2.getNormalizedURI(jvmParam);
            jvmVoidProxy.eSetProxyURI(uri);
        }
        container.setFeature(jvmVoid);
    }
}
|
public class DoubleTupleDistanceFunctions { /** * Computes the squared Euclidean distance between the given tuples
* @ param t0 The first tuple
* @ param t1 The second tuple
* @ return The squared distance
* @ throws IllegalArgumentException If the given tuples do not
* have the same { @ link Tuple # getSize ( ) size } */
public static double computeEuclideanSquared ( DoubleTuple t0 , DoubleTuple t1 ) { } }
|
Utils . checkForEqualSize ( t0 , t1 ) ; double sum = 0 ; int n = t0 . getSize ( ) ; for ( int i = 0 ; i < n ; i ++ ) { double value0 = t0 . get ( i ) ; double value1 = t1 . get ( i ) ; double d = value1 - value0 ; double dd = d * d ; sum += dd ; } return sum ;
|
public class PAbstractObject { /** * Get a property as a double or defaultValue .
* @ param key the property name
* @ param defaultValue the default value */
@ Override public final Double optDouble ( final String key , final Double defaultValue ) { } }
|
Double result = optDouble ( key ) ; return result == null ? defaultValue : result ;
|
public class NewChunk {
    /**
     * Slow-path append data.
     * Grows the UUID storage arrays (_ls and _ds) to make room for more values.
     */
    private void append2slowUUID() {
        final int CHUNK_SZ = 1 << H2O.LOG_CHK;
        if (_sparseLen > CHUNK_SZ)
            throw new ArrayIndexOutOfBoundsException(_sparseLen);
        if (_ds == null && _ls != null) {
            // This can happen for columns with all NAs and then a UUID
            _xs = null;
            alloc_doubles(_sparseLen);
            // Fill the existing rows with the UUID NA sentinels (_LO_NA into _ls,
            // _HI_NA bit pattern into _ds).
            Arrays.fill(_ls, C16Chunk._LO_NA);
            Arrays.fill(_ds, Double.longBitsToDouble(C16Chunk._HI_NA));
        }
        if (_ls != null && _ls.length > 0) {
            // Double the capacity of both halves of the storage.
            // NOTE(review): growth is _sparseLen << 1 -- confirm _sparseLen > 0 on this path.
            _ls = MemoryManager.arrayCopyOf(_ls, _sparseLen << 1);
            _ds = MemoryManager.arrayCopyOf(_ds, _sparseLen << 1);
        } else {
            // First allocation: start with room for 4 values.
            alloc_mantissa(4);
            alloc_doubles(4);
        }
        assert _sparseLen == 0 || _ls.length > _sparseLen : "_ls.length = " + _ls.length + ", _sparseLen = " + _sparseLen;
    }
}
|
public class CudaDirectProvider {
    /**
     * This method provides PointersPair to memory chunk specified by AllocationShape
     * @param shape shape of desired memory chunk
     * @param point target AllocationPoint structure
     * @param location either HOST or DEVICE
     * @return the allocated pointers pair, or null if a DEVICE allocation failed
     *     even after purging caches and forcing GC
     */
    @Override
    public PointersPair malloc(AllocationShape shape, AllocationPoint point, AllocationStatus location) {
        // log.info("shape onCreate: {}, target: {}", shape, location);
        switch (location) {
            case HOST: {
                long reqMem = AllocationUtils.getRequiredMemory(shape);
                // FIXME: this is WRONG, and directly leads to memleak
                if (reqMem < 1)
                    reqMem = 1;
                val pointer = nativeOps.mallocHost(reqMem, 0);
                if (pointer == null)
                    throw new RuntimeException("Can't allocate [HOST] memory: " + reqMem + "; threadId: " + Thread.currentThread().getId());
                // log.info("Host allocation, Thread id: {}, ReqMem: {}, Pointer: {}", Thread.currentThread().getId(), reqMem, pointer != null ? pointer.address() : null);
                val hostPointer = new CudaPointer(pointer);
                val devicePointerInfo = new PointersPair();
                // For HOST allocations both sides of the pair reference the same host pointer.
                devicePointerInfo.setDevicePointer(new CudaPointer(hostPointer, reqMem));
                devicePointerInfo.setHostPointer(new CudaPointer(hostPointer, reqMem));
                point.setPointers(devicePointerInfo);
                point.setAllocationStatus(AllocationStatus.HOST);
                MemoryTracker.getInstance().incrementAllocatedHostAmount(reqMem);
                return devicePointerInfo;
            }
            case DEVICE: {
                // cudaMalloc call
                val deviceId = AtomicAllocator.getInstance().getDeviceId();
                long reqMem = AllocationUtils.getRequiredMemory(shape);
                // FIXME: this is WRONG, and directly leads to memleak
                if (reqMem < 1)
                    reqMem = 1;
                AllocationsTracker.getInstance().markAllocated(AllocationKind.GENERAL, deviceId, reqMem);
                var pointer = nativeOps.mallocDevice(reqMem, deviceId, 0);
                if (pointer == null) {
                    // try to purge stuff if we're low on memory
                    purgeCache(deviceId);
                    // call for gc
                    Nd4j.getMemoryManager().invokeGc();
                    // One retry after freeing caches; null again means truly out of memory.
                    pointer = nativeOps.mallocDevice(reqMem, deviceId, 0);
                    if (pointer == null)
                        return null;
                }
                val devicePointer = new CudaPointer(pointer);
                // Reuse the existing pair on the point, if any; only the device side is set here.
                var devicePointerInfo = point.getPointers();
                if (devicePointerInfo == null)
                    devicePointerInfo = new PointersPair();
                devicePointerInfo.setDevicePointer(new CudaPointer(devicePointer, reqMem));
                point.setAllocationStatus(AllocationStatus.DEVICE);
                point.setDeviceId(deviceId);
                MemoryTracker.getInstance().incrementAllocatedAmount(deviceId, reqMem);
                return devicePointerInfo;
            }
            default:
                throw new IllegalStateException("Unsupported location for malloc: [" + location + "]");
        }
    }
}
|
public class XMLUtils {
    /**
     * Returns a non-validating XML parser. The parser ignores both DTDs and XSDs.
     * @return An XML parser in the form of a DocumentBuilder, or null when the
     *     factory could not be configured (the error is printed to stderr)
     */
    public static DocumentBuilder getXmlParser() {
        DocumentBuilder db = null;
        try {
            DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
            dbf.setValidating(false);
            // Disable DTD loading and validation
            // See http://stackoverflow.com/questions/155101/make-documentbuilder-parse-ignore-dtd-references
            dbf.setFeature("http://apache.org/xml/features/nonvalidating/load-dtd-grammar", false);
            dbf.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
            db = dbf.newDocumentBuilder();
            db.setErrorHandler(new SAXErrorHandler());
        } catch (ParserConfigurationException e) {
            System.err.printf("%s: Unable to create XML parser\n", XMLUtils.class.getName());
            e.printStackTrace();
        } catch (UnsupportedOperationException e) {
            System.err.printf("%s: API error while setting up XML parser. Check your JAXP version\n", XMLUtils.class.getName());
            e.printStackTrace();
        }
        return db;
    }
}
|
public class CompoundServiceFilter {
    /**
     * Calls filters' filtrate() method. Any exception stops the process.
     */
    @Override
    public void filtrate(DataBinder parameters) {
        // Run in order: an exception from 'first' prevents 'second' from running.
        first.filtrate(parameters);
        second.filtrate(parameters);
    }
}
|
public class AbstractNotificationBuilder {
    /**
     * Sets the type of the push notification being sent.
     * As of Windows Phone OS 7.0, the supported types are:
     * <ul>
     * <li>token (for Tile messages)</li>
     * <li>toast</li>
     * <li>raw</li>
     * </ul>
     * This method should probably not be called directly, as the concrete
     * builder class will set the appropriate notification type.
     * @param type the notification type
     * @return this
     */
    public A notificationType(String type) {
        // Recorded as the MPNS target header for the push request.
        this.headers.add(Pair.of("X-WindowsPhone-Target", type));
        // Unchecked cast to the concrete builder type for fluent chaining.
        return (A) this;
    }
}
|
public class AbstractTeam {
    /**
     * Kill all players and the coach (if connected).
     * Each member is told goodbye and stopped, with a short pause between shutdowns.
     */
    public void killAll() {
        for (int i = 0; i < size(); i++) {
            if (i == 0) {
                // NOTE(review): the first player is stopped without a try/catch,
                // unlike all the others -- an exception here aborts the whole
                // shutdown loop. Confirm this asymmetry is intended.
                players[i].bye();
                players[i].stopRunning();
            } else if (i >= 1) {
                try {
                    players[i].bye();
                    players[i].stopRunning();
                } catch (Exception ex) {
                    players[i].handleError(ex.getMessage());
                }
            }
            // Short delay between individual shutdowns.
            pause(500);
        }
        if (hasCoach) {
            try {
                coach.bye();
                coach.stopRunning();
            } catch (Exception ex) {
                coach.handleError(ex.getMessage());
            }
            pause(500);
        }
    }
}
|
public class MavenModelScannerPlugin {
    /**
     * Create plugin descriptors for the given plugins.
     * @param plugins
     *            The plugins.
     * @param context
     *            The scanner context.
     * @return The plugin descriptors.
     */
    private List<MavenPluginDescriptor> createMavenPluginDescriptors(List<Plugin> plugins, ScannerContext context) {
        Store store = context.getStore();
        List<MavenPluginDescriptor> pluginDescriptors = new ArrayList<>();
        for (Plugin plugin : plugins) {
            MavenPluginDescriptor mavenPluginDescriptor = store.create(MavenPluginDescriptor.class);
            // Resolve the plugin's coordinates into an artifact descriptor.
            MavenArtifactDescriptor artifactDescriptor = getArtifactResolver(context).resolve(new PluginCoordinates(plugin), context);
            mavenPluginDescriptor.setArtifact(artifactDescriptor);
            mavenPluginDescriptor.setInherited(plugin.isInherited());
            // Attach the plugin's dependencies, executions and configuration.
            mavenPluginDescriptor.getDeclaresDependencies().addAll(getDependencies(mavenPluginDescriptor, plugin.getDependencies(), PluginDependsOnDescriptor.class, context));
            addPluginExecutions(mavenPluginDescriptor, plugin, store);
            addConfiguration(mavenPluginDescriptor, (Xpp3Dom) plugin.getConfiguration(), store);
            pluginDescriptors.add(mavenPluginDescriptor);
        }
        return pluginDescriptors;
    }
}
|
public class SipSessionKey {
    /**
     * Sets the to tag on the key when we receive a response.
     * We recompute the session id only for derived session otherwise the id will change
     * when the a request is received or sent and the response is sent back or received which should not happen
     * See TCK test SipSessionListenerTest.testSessionDestroyed001
     * @param toTag the toTag to set
     * @param recomputeSessionId check if the sessionid need to be recomputed
     */
    public void setToTag(String toTag, boolean recomputeSessionId) {
        this.toTag = toTag;
        // Recompute only when a non-null tag was set and the caller asked for it.
        if (toTag != null && recomputeSessionId) {
            // Issue 2365: to tag needed for getApplicationSession().getSipSession(<sessionId>) to return forked session and not the parent one
            computeToString();
        }
    }
}
|
public class JmxScraper { /** * Get a list of mbeans on host _ port and scrape their values .
* Values are passed to the receiver in a single thread . */
public void doScrape ( ) throws Exception { } }
|
MBeanServerConnection beanConn ; JMXConnector jmxc = null ; if ( jmxUrl . isEmpty ( ) ) { beanConn = ManagementFactory . getPlatformMBeanServer ( ) ; } else { Map < String , Object > environment = new HashMap < String , Object > ( ) ; if ( username != null && username . length ( ) != 0 && password != null && password . length ( ) != 0 ) { String [ ] credent = new String [ ] { username , password } ; environment . put ( javax . management . remote . JMXConnector . CREDENTIALS , credent ) ; } if ( ssl ) { environment . put ( Context . SECURITY_PROTOCOL , "ssl" ) ; SslRMIClientSocketFactory clientSocketFactory = new SslRMIClientSocketFactory ( ) ; environment . put ( RMIConnectorServer . RMI_CLIENT_SOCKET_FACTORY_ATTRIBUTE , clientSocketFactory ) ; environment . put ( "com.sun.jndi.rmi.factory.socket" , clientSocketFactory ) ; } jmxc = JMXConnectorFactory . connect ( new JMXServiceURL ( jmxUrl ) , environment ) ; beanConn = jmxc . getMBeanServerConnection ( ) ; } try { // Query MBean names , see # 89 for reasons queryMBeans ( ) is used instead of queryNames ( )
Set < ObjectInstance > mBeanNames = new HashSet ( ) ; for ( ObjectName name : whitelistObjectNames ) { mBeanNames . addAll ( beanConn . queryMBeans ( name , null ) ) ; } for ( ObjectName name : whitelistObjectInstances ) { mBeanNames . add ( beanConn . getObjectInstance ( name ) ) ; } for ( ObjectName name : blacklistObjectNames ) { mBeanNames . removeAll ( beanConn . queryMBeans ( name , null ) ) ; } for ( ObjectName name : blacklistObjectInstances ) { mBeanNames . remove ( beanConn . getObjectInstance ( name ) ) ; } for ( ObjectInstance name : mBeanNames ) { long start = System . nanoTime ( ) ; scrapeBean ( beanConn , name . getObjectName ( ) ) ; logger . fine ( "TIME: " + ( System . nanoTime ( ) - start ) + " ns for " + name . getObjectName ( ) . toString ( ) ) ; } } finally { if ( jmxc != null ) { jmxc . close ( ) ; } }
|
public class JSpinners { /** * Disable dragging for the given spinner , by removing all
* mouse motion listeners that are { @ link SpinnerDraggingHandler } s
* from the spinner buttons
* @ param spinner The spinner */
private static void disableSpinnerDragging ( JSpinner spinner ) { } }
|
int n = spinner . getComponentCount ( ) ; for ( int i = 0 ; i < n ; i ++ ) { Component c = spinner . getComponent ( i ) ; String name = c . getName ( ) ; if ( "Spinner.nextButton" . equals ( name ) || "Spinner.previousButton" . equals ( name ) ) { MouseMotionListener mouseMotionListeners [ ] = c . getMouseMotionListeners ( ) ; for ( MouseMotionListener m : mouseMotionListeners ) { if ( m instanceof SpinnerDraggingHandler ) { c . removeMouseMotionListener ( m ) ; } } MouseListener mouseListeners [ ] = c . getMouseListeners ( ) ; for ( MouseListener m : mouseListeners ) { if ( m instanceof SpinnerDraggingHandler ) { c . removeMouseListener ( m ) ; } } } }
|
public class InjectReactiveMojo { /** * Expects a directory . */
private void walkDir ( File dir ) { } }
|
walkDir ( dir , new FileFilter ( ) { @ Override public boolean accept ( File pathname ) { return ( pathname . isFile ( ) && pathname . getName ( ) . endsWith ( ".class" ) ) ; } } , new FileFilter ( ) { @ Override public boolean accept ( File pathname ) { return ( pathname . isDirectory ( ) ) ; } } ) ;
|
public class TemplateDrivenMultiBranchProject {
    /**
     * Sets various implementation-specific fields and forwards wrapped req/rsp objects on to the
     * {@link #template}'s {@link AbstractProject#doConfigSubmit(StaplerRequest, StaplerResponse)} method.
     * <br>
     * {@inheritDoc}
     */
    @Override
    public void submit(StaplerRequest req, StaplerResponse rsp) throws ServletException, Descriptor.FormException, IOException {
        super.submit(req, rsp);
        // "disable" is a checkbox parameter: presence means the project is disabled.
        makeDisabled(req.getParameter("disable") != null);
        // Forward the submission to the template project through the wrappers.
        template.doConfigSubmit(new TemplateStaplerRequestWrapper(req), new TemplateStaplerResponseWrapper(req.getStapler(), rsp));
        ItemListener.fireOnUpdated(this);
        // notify the queue as the project might be now tied to different node
        Jenkins.getActiveInstance().getQueue().scheduleMaintenance();
        // this is to reflect the upstream build adjustments done above
        Jenkins.getActiveInstance().rebuildDependencyGraphAsync();
    }
}
|
public class Memoize {
    /**
     * Convert a Supplier into one that caches it's result
     * @param s Supplier to memoise
     * @param cache Cachable to store the results
     * @return Memoised Supplier
     */
    public static <T> Function0<T> memoizeSupplier(final Supplier<T> s, final Cacheable<T> cache) {
        // A supplier takes no arguments, so a single fixed key ("k") is enough
        // to cache its one result; the supplier only runs on the first call.
        return () -> cache.soften().computeIfAbsent("k", a -> s.get());
    }
}
|
public class Ifc4PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Lazily resolves and caches the IfcRelAssigns EClass from the registered package.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcRelAssigns() {
        if (ifcRelAssignsEClass == null) {
            // Classifier index 518 in the generated Ifc4 model.
            ifcRelAssignsEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(518);
        }
        return ifcRelAssignsEClass;
    }
}
|
public class CachedScheduledThreadPool {
    /**
     * After initialDelay executes commands using collections iterator until either iterator has no more commands or command throws exception.
     * @param initialDelay delay before the first command runs
     * @param period period between successive commands
     * @param unit time unit for initialDelay and period
     * @param commands commands to run, in the collection's iteration order
     * @return a future that can be used to cancel the iteration
     * @see java.util.concurrent.ScheduledThreadPoolExecutor#scheduleAtFixedDelay(java.lang.Runnable, long, long, java.util.concurrent.TimeUnit)
     */
    public ScheduledFuture<?> iterateAtFixedRate(long initialDelay, long period, TimeUnit unit, Collection<Runnable> commands) {
        // Delegate to the iterator-based overload.
        return iterateAtFixedRate(initialDelay, period, unit, commands.iterator());
    }
}
|
public class RedoLog {
    /**
     * Initializes the {@link #out} stream if it is not yet set.
     * @throws IOException if an error occurs while creating the
     *     output stream.
     */
    private synchronized void initOut() throws IOException {
        if (out == null) {
            // Lazily open the redo log for writing; synchronized so at most one
            // writer is ever created.
            OutputStream os = new IndexOutputStream(dir.createOutput(REDO_LOG));
            // NOTE(review): OutputStreamWriter uses the platform default charset
            // here -- confirm the log is read back with the same charset.
            out = new BufferedWriter(new OutputStreamWriter(os));
        }
    }
}
|
public class SslContextFactory { /** * Does an object array include an object .
* @ param arr The array
* @ param obj The object */
private static boolean contains ( Object [ ] arr , Object obj ) { } }
|
for ( Object o : arr ) { if ( o . equals ( obj ) ) return true ; } return false ;
|
public class ChronosServerWatcher {
    /**
     * Deal with the connection event.
     * @param event the ZooKeeper event.
     */
    @Override
    protected void processConnection(WatchedEvent event) {
        super.processConnection(event);
        switch (event.getState()) {
            case Disconnected:
                if (beenActiveMaster) {
                    // An active master that loses ZooKeeper must stop serving at once.
                    LOG.fatal(hostPort.getHostPort() + " disconnected from ZooKeeper, stop serving and exit immediately");
                    System.exit(0);
                } else {
                    // A standby can wait for the session to re-sync.
                    LOG.warn(hostPort.getHostPort() + " disconnected from ZooKeeper, wait to sync and try to become active master");
                }
                break;
            default:
                break;
        }
    }
}
|
public class SearchCommand {
    /**
     * ----- builder methods -----
     * Disables sorting for this query.
     * @return this query, for fluent chaining
     */
    @Override
    public org.structr.core.app.Query<T> disableSorting() {
        // Flag consumed later when the query executes.
        this.doNotSort = true;
        return this;
    }
}
|
public class MongoNativeExtractor { /** * Gets shards .
* @ param collection the collection
* @ return the shards */
private Map < String , String [ ] > getShards ( DBCollection collection ) { } }
|
DB config = collection . getDB ( ) . getSisterDB ( "config" ) ; DBCollection configShards = config . getCollection ( "shards" ) ; DBCursor cursorShards = configShards . find ( ) ; Map < String , String [ ] > map = new HashMap < > ( ) ; while ( cursorShards . hasNext ( ) ) { DBObject currentShard = cursorShards . next ( ) ; String currentHost = ( String ) currentShard . get ( "host" ) ; int slashIndex = currentHost . indexOf ( "/" ) ; if ( slashIndex > 0 ) { map . put ( ( String ) currentShard . get ( MONGO_DEFAULT_ID ) , currentHost . substring ( slashIndex + 1 ) . split ( "," ) ) ; } } return map ;
|
public class BootstrapChildFirstURLClassloader {
    /**
     * Child-first class loading. Any changes must be made to both sources
     * (this loader and {@code BootstrapChildFirstJarClassloader} — presumably
     * its jar-based twin; confirm).
     */
    @Override
    protected synchronized Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
        // NOTE(review): the method is synchronized AND takes the per-name class
        // loading lock below — the method-level lock serializes all loads.
        synchronized (getClassLoadingLock(name)) {
            Class<?> result = null;
            if (name == null || name.length() == 0) return null;
            // Already defined by this loader? Reuse it.
            result = findLoadedClass(name);
            if (result == null) {
                // Kernel boot classes always go through the normal parent path.
                if (name.regionMatches(0, BootstrapChildFirstJarClassloader.KERNEL_BOOT_CLASS_PREFIX, 0, BootstrapChildFirstJarClassloader.KERNEL_BOOT_PREFIX_LENGTH)) {
                    result = super.loadClass(name, resolve);
                } else {
                    try {
                        // Try to load the class from this classpath
                        result = findClass(name);
                    } catch (ClassNotFoundException cnfe) {
                        // Not found locally: fall back to parent-first resolution.
                        result = super.loadClass(name, resolve);
                    }
                }
            }
            return result;
        }
    }
}
public class CustomTypeAdapterFactory {
    /**
     * Override this to define how this is serialized in {@code toSerialize} to
     * the outgoing JSON stream.
     *
     * @param out            the JSON stream to write to
     * @param value          the value being serialized
     * @param elementAdapter adapter that streams a JsonElement tree to {@code out}
     * @param delegate       the default adapter for {@code T}
     * @throws IOException if writing to the stream fails
     */
    protected void write(JsonWriter out, T value, TypeAdapter<JsonElement> elementAdapter, TypeAdapter<T> delegate) throws IOException {
        // Serialize to an in-memory tree first so beforeWrite can mutate it
        // before it is streamed out.
        JsonElement tree = delegate.toJsonTree(value);
        beforeWrite(value, tree);
        elementAdapter.write(out, tree);
    }
}
public class IterableLens {
    /**
     * A lens focusing on the tail of an {@link Iterable}.
     *
     * @param <A> the Iterable element type
     * @return a lens focusing on the tail of an {@link Iterable}
     */
    public static <A> Lens.Simple<Iterable<A>, Iterable<A>> tail() {
        // Getter: Tail::tail drops the head. Setter: look up the original head
        // and, if present, cons it onto the new tail; otherwise the new tail is
        // used as-is (id()).
        // NOTE(review): exact semantics depend on the Head/Tail/cons helpers
        // defined elsewhere — confirm against those implementations.
        return simpleLens(Tail::tail, fn2(Head.<A>head().andThen(o -> o.fmap(cons()).orElse(id()))).toBiFunction());
    }
}
public class Encoder {
    /**
     * Updates a pseudo-Boolean encoding.
     *
     * @param s   the solver
     * @param rhs the new right hand side
     * @throws IllegalStateException if the pseudo-Boolean encoding is unknown
     */
    public void updatePB(final MiniSatStyleSolver s, int rhs) {
        // Only the SWC encoding supports an incremental right-hand-side update
        // here; any other configured encoding is a programming error.
        switch (this.pbEncoding) {
            case SWC:
                this.swc.update(s, rhs);
                break;
            default:
                throw new IllegalStateException("Unknown pseudo-Boolean encoding: " + this.pbEncoding);
        }
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.