signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class FirestoreClient { /** * Deletes a document .
* < p > Sample code :
* < pre > < code >
* try ( FirestoreClient firestoreClient = FirestoreClient . create ( ) ) {
* AnyPathName name = AnyPathName . of ( " [ PROJECT ] " , " [ DATABASE ] " , " [ DOCUMENT ] " , " [ ANY _ PATH ] " ) ;
* firestoreClient . deleteDocument ( name . toString ( ) ) ;
* < / code > < / pre >
* @ param name The resource name of the Document to delete . In the format :
* ` projects / { project _ id } / databases / { database _ id } / documents / { document _ path } ` .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final void deleteDocument ( String name ) { } } | DeleteDocumentRequest request = DeleteDocumentRequest . newBuilder ( ) . setName ( name ) . build ( ) ; deleteDocument ( request ) ; |
public class InternalFeaturePropertyAccessor { /** * { @ inheritDoc } */
public boolean canRead ( EvaluationContext context , Object target , String name ) throws AccessException { } } | if ( null == target ) { return false ; } if ( target instanceof InternalFeature ) { InternalFeature feature = ( InternalFeature ) target ; return feature . getAttributes ( ) . containsKey ( name ) || ID_PROPERTY_NAME . equalsIgnoreCase ( name ) ; } else if ( target instanceof AssociationValue ) { AssociationValue associationValue = ( AssociationValue ) target ; return associationValue . getAllAttributes ( ) . containsKey ( name ) || ID_PROPERTY_NAME . equalsIgnoreCase ( name ) ; } return false ; |
public class NodeBuilder { /** * already be initialised , and only aren ' t in the case where we are overflowing the original root node */
private NodeBuilder ensureParent ( ) { } } | if ( parent == null ) { parent = new NodeBuilder ( ) ; parent . child = this ; } if ( parent . upperBound == null ) parent . reset ( EMPTY_BRANCH , upperBound , updateFunction , comparator ) ; return parent ; |
public class FirewallRulesInner { /** * Gets the specified Data Lake Store firewall rule .
* @ param resourceGroupName The name of the Azure resource group .
* @ param accountName The name of the Data Lake Store account .
* @ param firewallRuleName The name of the firewall rule to retrieve .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the FirewallRuleInner object */
public Observable < FirewallRuleInner > getAsync ( String resourceGroupName , String accountName , String firewallRuleName ) { } } | return getWithServiceResponseAsync ( resourceGroupName , accountName , firewallRuleName ) . map ( new Func1 < ServiceResponse < FirewallRuleInner > , FirewallRuleInner > ( ) { @ Override public FirewallRuleInner call ( ServiceResponse < FirewallRuleInner > response ) { return response . body ( ) ; } } ) ; |
public class BasicFunctionsRuntime { /** * Concatenates its arguments . */
public static List < SoyValueProvider > concatLists ( List < SoyList > args ) { } } | ImmutableList . Builder < SoyValueProvider > flattened = ImmutableList . builder ( ) ; for ( SoyList soyList : args ) { flattened . addAll ( soyList . asJavaList ( ) ) ; } return flattened . build ( ) ; |
public class WMenuItem { /** * Determine if this WMenuItem ' s parent WMenu is on the Request .
* @ param request the request being responded to .
* @ return true if this WMenuItem ' s WMenu is on the Request , otherwise return false . */
protected boolean isMenuPresent ( final Request request ) { } } | WMenu menu = WebUtilities . getAncestorOfClass ( WMenu . class , this ) ; if ( menu != null ) { return menu . isPresent ( request ) ; } return false ; |
public class FieldUtils { /** * Verify that input values are within specified bounds .
* @ param value the value to check
* @ param lowerBound the lower bound allowed for value
* @ param upperBound the upper bound allowed for value
* @ throws IllegalFieldValueException if value is not in the specified bounds */
public static void verifyValueBounds ( DateTimeField field , int value , int lowerBound , int upperBound ) { } } | if ( ( value < lowerBound ) || ( value > upperBound ) ) { throw new IllegalFieldValueException ( field . getType ( ) , Integer . valueOf ( value ) , Integer . valueOf ( lowerBound ) , Integer . valueOf ( upperBound ) ) ; } |
public class METSContentHandler { /** * { @ inheritDoc } */
@ Override public void endElement ( String uri , String localName , String qName ) throws SAXException { } } | // first , deal with the situation when we are processing a block of inline XML
if ( m_inXMLMetadata ) { if ( uri . equals ( METS . uri ) && localName . equals ( "xmlData" ) && m_xmlDataLevel == 0 ) { // finished all xml metadata for this datastream
if ( m_dsId . equals ( "FEDORA-AUDITTRAIL" ) || m_dsId . equals ( "AUDIT" ) ) { // we ' ve been looking at an audit trail . . . set audit record
AuditRecord a = new AuditRecord ( ) ; // In METS each audit record is in its own < digiprovMD >
// element within an < amdSec > . So , pick up the XML ID
// of the < digiprovMD > element for the audit record id .
// This amdSec is treated like a datastream , and each
// digiprovMD is a version , so id was parsed into dsVersId .
a . id = m_auditId ; // m _ dsVersId ;
a . processType = m_auditProcessType ; a . action = m_auditAction ; a . componentID = m_auditComponentID ; a . responsibility = m_auditResponsibility ; a . date = DateUtility . convertStringToDate ( m_auditDate ) ; a . justification = m_auditJustification ; m_obj . getAuditRecords ( ) . add ( a ) ; m_inXMLMetadata = false ; // other stuff is re - initted upon
// startElement for next xml metadata
// element
} else { // Create the right kind of datastream and add to the object
DatastreamXMLMetadata ds = new DatastreamXMLMetadata ( ) ; instantiateXMLDatastream ( ds ) ; m_inXMLMetadata = false ; m_localPrefixMap . clear ( ) ; } } else { // finished an element within inline xml metadata
m_dsXMLBuffer . append ( "</" + qName + ">" ) ; // make sure we know when to pay attention to METS again
if ( uri . equals ( METS . uri ) && localName . equals ( "xmlData" ) ) { m_xmlDataLevel -- ; } if ( m_dsId . equals ( "FEDORA-AUDITTRAIL" ) || m_dsId . equals ( "AUDIT" ) ) { if ( localName . equals ( "action" ) ) { m_auditAction = m_auditBuffer . toString ( ) ; m_auditBuffer = null ; } else if ( localName . equals ( "componentID" ) ) { m_auditComponentID = m_auditBuffer . toString ( ) ; m_auditBuffer = null ; } else if ( localName . equals ( "responsibility" ) ) { m_auditResponsibility = m_auditBuffer . toString ( ) ; m_auditBuffer = null ; } else if ( localName . equals ( "date" ) ) { m_auditDate = m_auditBuffer . toString ( ) ; m_auditBuffer = null ; } else if ( localName . equals ( "justification" ) ) { m_auditJustification = m_auditBuffer . toString ( ) ; m_auditBuffer = null ; } } } // ALL OTHER ELEMENT CASES : we are NOT processing a block of inline XML metadata
} else { if ( m_readingBinaryContent ) { // In the version of METS Fedora uses , FContent assumes base64 - encoded content
if ( uri . equals ( METS . uri ) && localName . equals ( "FContent" ) ) { if ( m_binaryContentTempFile != null ) { try { FileOutputStream os = new FileOutputStream ( m_binaryContentTempFile ) ; // remove all spaces and newlines , this might not be necessary .
String elementStr = m_elementContent . toString ( ) . replaceAll ( "\\s" , "" ) ; byte elementBytes [ ] = Base64 . decode ( elementStr ) ; os . write ( elementBytes ) ; os . close ( ) ; m_dsLocationType = Datastream . DS_LOCATION_TYPE_INTERNAL ; m_dsLocation = DatastreamManagedContent . TEMP_SCHEME + m_binaryContentTempFile . getAbsolutePath ( ) ; instantiateDatastream ( new DatastreamManagedContent ( ) ) ; } catch ( FileNotFoundException fnfe ) { throw new SAXException ( new StreamIOException ( "Unable to open temporary file created for binary content" ) ) ; } catch ( IOException fnfe ) { throw new SAXException ( new StreamIOException ( "Error writing to temporary file created for binary content" ) ) ; } } } m_binaryContentTempFile = null ; m_readingBinaryContent = false ; m_elementContent = null ; // all other cases . . .
} else { if ( m_readingContent ) { // elements for which we were reading regular content
if ( uri . equals ( METS . uri ) && localName . equals ( "name" ) && m_agentRole . equals ( "IPOWNER" ) ) { m_obj . setOwnerId ( m_elementContent . toString ( ) ) ; } else if ( uri . equals ( METS . uri ) && localName . equals ( "agent" ) ) { m_agentRole = null ; } m_readingContent = false ; m_elementContent = null ; } else { // no other processing requirements at this time
} } } |
public class AbstractDocumentationMojo { /** * Execute the mojo on the given set of files .
* @ param files the files
* @ param outputFolder the output directory .
* @ return the error message */
protected String internalExecute ( Map < File , File > files , File outputFolder ) { } } | String firstErrorMessage = null ; for ( final Entry < File , File > entry : files . entrySet ( ) ) { final File inputFile = entry . getKey ( ) ; try { final AbstractMarkerLanguageParser parser = createLanguageParser ( inputFile ) ; final File sourceFolder = entry . getValue ( ) ; final File relativePath = FileSystem . makeRelative ( inputFile , sourceFolder ) ; internalExecute ( sourceFolder , inputFile , relativePath , outputFolder , parser ) ; } catch ( Throwable exception ) { final String errorMessage = formatErrorMessage ( inputFile , exception ) ; getLog ( ) . error ( errorMessage ) ; if ( Strings . isEmpty ( firstErrorMessage ) ) { firstErrorMessage = errorMessage ; } getLog ( ) . debug ( exception ) ; } } return firstErrorMessage ; |
public class Database { /** * Finds a unique result from database , converting the database row to int using default mechanisms .
* Returns empty if there are no results or if single null result is returned .
* @ throws NonUniqueResultException if there are multiple result rows */
public @ NotNull OptionalInt findOptionalInt ( @ NotNull SqlQuery query ) { } } | Optional < Integer > value = findOptional ( Integer . class , query ) ; return value . isPresent ( ) ? OptionalInt . of ( value . get ( ) ) : OptionalInt . empty ( ) ; |
public class ISPNIndexUpdateMonitor { /** * { @ inheritDoc } */
public void onChangeMode ( IndexerIoMode mode ) { } } | if ( mode == IndexerIoMode . READ_WRITE ) { // In READ _ WRITE , the value of UpdateInProgress is changed locally so no need to listen
// to the cache
cache . removeListener ( this ) ; } else { // In READ _ ONLY , the value of UpdateInProgress will be changed remotely , so we have
// no need but to listen to the cache to be notified when the value changes
cache . addListener ( this ) ; } |
public class TruncatedNormalDistributionTypeImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public void eSet ( int featureID , Object newValue ) { } } | switch ( featureID ) { case BpsimPackage . TRUNCATED_NORMAL_DISTRIBUTION_TYPE__MAX : setMax ( ( Double ) newValue ) ; return ; case BpsimPackage . TRUNCATED_NORMAL_DISTRIBUTION_TYPE__MEAN : setMean ( ( Double ) newValue ) ; return ; case BpsimPackage . TRUNCATED_NORMAL_DISTRIBUTION_TYPE__MIN : setMin ( ( Double ) newValue ) ; return ; case BpsimPackage . TRUNCATED_NORMAL_DISTRIBUTION_TYPE__STANDARD_DEVIATION : setStandardDeviation ( ( Double ) newValue ) ; return ; } super . eSet ( featureID , newValue ) ; |
public class UserGroupManager { /** * Assign the passed role ID to the user group with the passed ID . < br >
* Note : the role ID must not be checked for consistency
* @ param sUserGroupID
* The ID of the user group to assign the role to .
* @ param sRoleID
* The ID of the role to be assigned
* @ return { @ link EChange # CHANGED } if the passed user group ID was resolved ,
* and the role ID was not already previously contained */
@ Nonnull public EChange assignRoleToUserGroup ( @ Nullable final String sUserGroupID , @ Nonnull @ Nonempty final String sRoleID ) { } } | // Resolve user group
final UserGroup aUserGroup = getOfID ( sUserGroupID ) ; if ( aUserGroup == null ) { AuditHelper . onAuditModifyFailure ( UserGroup . OT , sUserGroupID , "no-such-usergroup-id" , "assign-role" ) ; return EChange . UNCHANGED ; } m_aRWLock . writeLock ( ) . lock ( ) ; try { if ( aUserGroup . assignRole ( sRoleID ) . isUnchanged ( ) ) return EChange . UNCHANGED ; BusinessObjectHelper . setLastModificationNow ( aUserGroup ) ; internalUpdateItem ( aUserGroup ) ; } finally { m_aRWLock . writeLock ( ) . unlock ( ) ; } AuditHelper . onAuditModifySuccess ( UserGroup . OT , "assign-role" , sUserGroupID , sRoleID ) ; // Execute callback as the very last action
m_aCallbacks . forEach ( aCB -> aCB . onUserGroupRoleAssignment ( aUserGroup , sRoleID , true ) ) ; return EChange . CHANGED ; |
public class ReflectedHeap { /** * { @ inheritDoc } */
@ Override @ SuppressWarnings ( "unchecked" ) public Handle < K , V > deleteMax ( ) { } } | if ( size == 0 ) { throw new NoSuchElementException ( ) ; } else if ( size == 1 ) { Handle < K , V > max = free ; free = null ; size -- ; return max ; } else if ( size % 2 == 0 ) { // find max
AddressableHeap . Handle < K , HandleMap < K , V > > maxInner = maxHeap . deleteMin ( ) ; ReflectedHandle < K , V > maxOuter = maxInner . getValue ( ) . outer ; maxOuter . inner = null ; maxOuter . minNotMax = false ; // delete min and keep as free
AddressableHeap . Handle < K , HandleMap < K , V > > minInner = maxInner . getValue ( ) . otherInner ; ReflectedHandle < K , V > minOuter = minInner . getValue ( ) . outer ; minInner . delete ( ) ; minOuter . inner = null ; minOuter . minNotMax = false ; free = minOuter ; size -- ; return maxOuter ; } else { // find max
AddressableHeap . Handle < K , HandleMap < K , V > > maxInner = maxHeap . findMin ( ) ; int c ; if ( comparator == null ) { c = ( ( Comparable < ? super K > ) maxInner . getKey ( ) ) . compareTo ( free . key ) ; } else { c = comparator . compare ( maxInner . getKey ( ) , free . key ) ; } if ( c < 0 ) { Handle < K , V > max = free ; free = null ; size -- ; return max ; } // maxInner is larger
maxInner . delete ( ) ; ReflectedHandle < K , V > maxOuter = maxInner . getValue ( ) . outer ; maxOuter . inner = null ; maxOuter . minNotMax = false ; // delete min
AddressableHeap . Handle < K , HandleMap < K , V > > minInner = maxInner . getValue ( ) . otherInner ; ReflectedHandle < K , V > minOuter = minInner . getValue ( ) . outer ; minInner . delete ( ) ; minOuter . inner = null ; minOuter . minNotMax = false ; // reinsert min with free
insertPair ( minOuter , free ) ; free = null ; size -- ; return maxOuter ; } |
public class CalculateUtils { /** * 计算比率 。 计算结果四舍五入 。
* @ param numerator 分子
* @ param denominator 分母
* @ param scale 保留小数点后位数
* @ return 比率 */
public static double divide ( long numerator , long denominator , int scale ) { } } | BigDecimal numeratorBd = new BigDecimal ( numerator ) ; BigDecimal denominatorBd = new BigDecimal ( denominator ) ; return numeratorBd . divide ( denominatorBd , scale , BigDecimal . ROUND_HALF_UP ) . doubleValue ( ) ; |
public class JdbcCpoTrxAdapter { /** * DOCUMENT ME !
* @ return DOCUMENT ME !
* @ throws CpoException DOCUMENT ME ! */
@ Override protected Connection getWriteConnection ( ) throws CpoException { } } | Connection connection = getStaticConnection ( ) ; setConnectionBusy ( connection ) ; return connection ; |
public class Instant { /** * Obtains an instance of { @ code Instant } using seconds from the
* epoch of 1970-01-01T00:00:00Z and nanosecond fraction of second .
* This method allows an arbitrary number of nanoseconds to be passed in .
* The factory will alter the values of the second and nanosecond in order
* to ensure that the stored nanosecond is in the range 0 to 999,999,999.
* For example , the following will result in the exactly the same instant :
* < pre >
* Instant . ofSeconds ( 3 , 1 ) ;
* Instant . ofSeconds ( 4 , - 999_999_999 ) ;
* Instant . ofSeconds ( 2 , 1000_000_001 ) ;
* < / pre >
* @ param epochSecond the number of seconds from 1970-01-01T00:00:00Z
* @ param nanoAdjustment the nanosecond adjustment to the number of seconds , positive or negative
* @ return an instant , not null
* @ throws DateTimeException if the instant exceeds the maximum or minimum instant
* @ throws ArithmeticException if numeric overflow occurs */
public static Instant ofEpochSecond ( long epochSecond , long nanoAdjustment ) { } } | long secs = Jdk8Methods . safeAdd ( epochSecond , Jdk8Methods . floorDiv ( nanoAdjustment , NANOS_PER_SECOND ) ) ; int nos = Jdk8Methods . floorMod ( nanoAdjustment , NANOS_PER_SECOND ) ; return create ( secs , nos ) ; |
public class ItemTouchHelper { /** * If user drags the view to the edge , trigger a scroll if necessary . */
boolean scrollIfNecessary ( ) { } } | if ( mSelected == null ) { mDragScrollStartTimeInMs = Long . MIN_VALUE ; return false ; } final long now = System . currentTimeMillis ( ) ; final long scrollDuration = mDragScrollStartTimeInMs == Long . MIN_VALUE ? 0 : now - mDragScrollStartTimeInMs ; RecyclerView . LayoutManager lm = mRecyclerView . getLayoutManager ( ) ; if ( mTmpRect == null ) { mTmpRect = new Rect ( ) ; } int scrollX = 0 ; int scrollY = 0 ; lm . calculateItemDecorationsForChild ( mSelected . itemView , mTmpRect ) ; if ( lm . canScrollHorizontally ( ) ) { int curX = ( int ) ( mSelectedStartX + mDx ) ; final int leftDiff = curX - mTmpRect . left - mRecyclerView . getPaddingLeft ( ) ; if ( mDx < 0 && leftDiff < 0 ) { scrollX = leftDiff ; } else if ( mDx > 0 ) { final int rightDiff = curX + mSelected . itemView . getWidth ( ) + mTmpRect . right - ( mRecyclerView . getWidth ( ) - mRecyclerView . getPaddingRight ( ) ) ; if ( rightDiff > 0 ) { scrollX = rightDiff ; } } } if ( lm . canScrollVertically ( ) ) { int curY = ( int ) ( mSelectedStartY + mDy ) ; final int topDiff = curY - mTmpRect . top - mRecyclerView . getPaddingTop ( ) ; if ( mDy < 0 && topDiff < 0 ) { scrollY = topDiff ; } else if ( mDy > 0 ) { final int bottomDiff = curY + mSelected . itemView . getHeight ( ) + mTmpRect . bottom - ( mRecyclerView . getHeight ( ) - mRecyclerView . getPaddingBottom ( ) ) ; if ( bottomDiff > 0 ) { scrollY = bottomDiff ; } } } if ( scrollX != 0 ) { scrollX = mCallback . interpolateOutOfBoundsScroll ( mRecyclerView , mSelected . itemView . getWidth ( ) , scrollX , mRecyclerView . getWidth ( ) , scrollDuration ) ; } if ( scrollY != 0 ) { scrollY = mCallback . interpolateOutOfBoundsScroll ( mRecyclerView , mSelected . itemView . getHeight ( ) , scrollY , mRecyclerView . getHeight ( ) , scrollDuration ) ; } if ( scrollX != 0 || scrollY != 0 ) { if ( mDragScrollStartTimeInMs == Long . MIN_VALUE ) { mDragScrollStartTimeInMs = now ; } mRecyclerView . 
scrollBy ( scrollX , scrollY ) ; return true ; } mDragScrollStartTimeInMs = Long . MIN_VALUE ; return false ; |
public class Builder { /** * asserts a valid document */
public Document literal ( String text ) { } } | try { return parseString ( text ) ; } catch ( SAXException e ) { throw new RuntimeException ( text , e ) ; } |
public class FactoryBackgroundModel { /** * Creates an instance of { @ link BackgroundMovingGaussian } .
* @ param config Configures the background model
* @ param imageType Type of input image
* @ return new instance of the background model */
public static < T extends ImageBase < T > , Motion extends InvertibleTransform < Motion > > BackgroundMovingGaussian < T , Motion > movingGaussian ( @ Nonnull ConfigBackgroundGaussian config , Point2Transform2Model_F32 < Motion > transform , ImageType < T > imageType ) { } } | config . checkValidity ( ) ; BackgroundMovingGaussian < T , Motion > ret ; switch ( imageType . getFamily ( ) ) { case GRAY : ret = new BackgroundMovingGaussian_SB ( config . learnRate , config . threshold , transform , config . interpolation , imageType . getImageClass ( ) ) ; break ; case PLANAR : ret = new BackgroundMovingGaussian_PL ( config . learnRate , config . threshold , transform , config . interpolation , imageType ) ; break ; case INTERLEAVED : ret = new BackgroundMovingGaussian_IL ( config . learnRate , config . threshold , transform , config . interpolation , imageType ) ; break ; default : throw new IllegalArgumentException ( "Unknown image type" ) ; } ret . setInitialVariance ( config . initialVariance ) ; ret . setMinimumDifference ( config . minimumDifference ) ; ret . setUnknownValue ( config . unknownValue ) ; return ret ; |
public class LdiSrl { /** * Extract rear sub - string from last - found delimiter .
* < pre >
* substringLastRear ( " foo . bar / baz . qux " , " . " , " / " )
* returns " qux "
* < / pre >
* @ param str The target string . ( NotNull )
* @ param delimiters The array of delimiters . ( NotNull )
* @ return The part of string . ( NotNull : if delimiter not found , returns argument - plain string ) */
public static String substringLastRear ( String str , String ... delimiters ) { } } | assertStringNotNull ( str ) ; return doSubstringFirstRear ( true , true , false , str , delimiters ) ; |
public class StaticConnectionProvider { /** * from ConnectionProvider */
public String getURL ( String ident ) { } } | Properties props = PropertiesUtil . getSubProperties ( _props , ident , DEFAULTS_KEY ) ; return props . getProperty ( "url" ) ; |
public class ICUBinary { /** * Reads the entire contents from the stream into a byte array
* and wraps it into a ByteBuffer . Closes the InputStream at the end . */
public static ByteBuffer getByteBufferFromInputStreamAndCloseStream ( InputStream is ) throws IOException { } } | try { // is . available ( ) may return 0 , or 1 , or the total number of bytes in the stream ,
// or some other number .
// Do not try to use is . available ( ) = = 0 to find the end of the stream !
byte [ ] bytes ; int avail = is . available ( ) ; if ( avail > 32 ) { // There are more bytes available than just the ICU data header length .
// With luck , it is the total number of bytes .
bytes = new byte [ avail ] ; } else { bytes = new byte [ 128 ] ; // empty . res files are even smaller
} // Call is . read ( . . . ) until one returns a negative value .
int length = 0 ; for ( ; ; ) { if ( length < bytes . length ) { int numRead = is . read ( bytes , length , bytes . length - length ) ; if ( numRead < 0 ) { break ; // end of stream
} length += numRead ; } else { // See if we are at the end of the stream before we grow the array .
int nextByte = is . read ( ) ; if ( nextByte < 0 ) { break ; } int capacity = 2 * bytes . length ; if ( capacity < 128 ) { capacity = 128 ; } else if ( capacity < 0x4000 ) { capacity *= 2 ; // Grow faster until we reach 16kB .
} // TODO Java 6 replace new byte [ ] and arraycopy ( ) : bytes = Arrays . copyOf ( bytes , capacity ) ;
byte [ ] newBytes = new byte [ capacity ] ; System . arraycopy ( bytes , 0 , newBytes , 0 , length ) ; bytes = newBytes ; bytes [ length ++ ] = ( byte ) nextByte ; } } return ByteBuffer . wrap ( bytes , 0 , length ) ; } finally { is . close ( ) ; } |
public class AbstractChainingPrintRenderer { /** * Change the current { @ link WikiPrinter } with the provided one .
* @ param wikiPrinter the new { @ link WikiPrinter } to use */
protected void pushPrinter ( WikiPrinter wikiPrinter ) { } } | this . printers . push ( wikiPrinter ) ; // Since we ' re setting a new printer to use , make sure that all print renderers in the chain have the new
// printer set . Only do this if we ' re on the top level Print Renderer .
if ( getListenerChain ( ) . indexOf ( getClass ( ) ) == 0 ) { ChainingListener nextListener = this ; while ( ( nextListener = getListenerChain ( ) . getNextListener ( nextListener . getClass ( ) ) ) != null ) { if ( PrintRenderer . class . isAssignableFrom ( nextListener . getClass ( ) ) ) { ( ( PrintRenderer ) nextListener ) . setPrinter ( wikiPrinter ) ; } } } |
public class KeyVaultClientBaseImpl { /** * Verifies a signature using a specified key .
* The VERIFY operation is applicable to symmetric keys stored in Azure Key Vault . VERIFY is not strictly necessary for asymmetric keys stored in Azure Key Vault since signature verification can be performed using the public portion of the key but this operation is supported as a convenience for callers that only have a key - reference and not the public portion of the key . This operation requires the keys / verify permission .
* @ param vaultBaseUrl The vault name , for example https : / / myvault . vault . azure . net .
* @ param keyName The name of the key .
* @ param keyVersion The version of the key .
* @ param algorithm The signing / verification algorithm . For more information on possible algorithm types , see JsonWebKeySignatureAlgorithm . Possible values include : ' PS256 ' , ' PS384 ' , ' PS512 ' , ' RS256 ' , ' RS384 ' , ' RS512 ' , ' RSNULL ' , ' ES256 ' , ' ES384 ' , ' ES512 ' , ' ES256K '
* @ param digest The digest used for signing .
* @ param signature The signature to be verified .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws KeyVaultErrorException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the KeyVerifyResult object if successful . */
public KeyVerifyResult verify ( String vaultBaseUrl , String keyName , String keyVersion , JsonWebKeySignatureAlgorithm algorithm , byte [ ] digest , byte [ ] signature ) { } } | return verifyWithServiceResponseAsync ( vaultBaseUrl , keyName , keyVersion , algorithm , digest , signature ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class DefaultRibbonPanel { /** * Get view panel .
* @ return The viewPanel Canvas */
public Canvas getViewPanel ( ) { } } | VLayout layout = new VLayout ( 5 ) ; layout . setPadding ( 5 ) ; MapWidget mapWidget = new MapWidget ( "mapGuwOsm" , "appGuw" ) ; final RibbonBarLayout ribbonBar = new RibbonBarLayout ( mapWidget , "appGuw" , "guwRibbonBar1" ) ; ribbonBar . setSize ( "100%" , "94px" ) ; ToolStrip toolStrip = new ToolStrip ( ) ; toolStrip . setWidth100 ( ) ; ToolStripButton btn1 = new ToolStripButton ( "Toggle group title" ) ; btn1 . addClickHandler ( new ClickHandler ( ) { public void onClick ( ClickEvent event ) { ribbonBar . setShowGroupTitles ( ! ribbonBar . isShowGroupTitles ( ) ) ; } } ) ; toolStrip . addButton ( btn1 ) ; layout . addMember ( toolStrip ) ; layout . addMember ( ribbonBar ) ; layout . addMember ( mapWidget ) ; return layout ; |
public class DefaultHistoryEventProducer { /** * Batch */
@ Override public HistoryEvent createBatchStartEvent ( Batch batch ) { } } | HistoryEvent historicBatch = createBatchEvent ( ( BatchEntity ) batch , HistoryEventTypes . BATCH_START ) ; if ( isHistoryRemovalTimeStrategyStart ( ) ) { provideRemovalTime ( ( HistoricBatchEntity ) historicBatch ) ; } return historicBatch ; |
public class ElementSelectors { /** * Elements with the same local name ( and namespace URI - if any )
* and attribute values for the given attribute names can be
* compared .
* < p > Namespace URIs of attributes are those of the attributes on
* the control element or the null namespace if they don ' t
* exist . < / p > */
public static ElementSelector byNameAndAttributesControlNS ( final String ... attribs ) { } } | if ( attribs == null ) { throw new IllegalArgumentException ( ATTRIBUTES_MUST_NOT_BE_NULL ) ; } final Collection < String > qs = Arrays . asList ( attribs ) ; if ( any ( qs , new IsNullPredicate ( ) ) ) { throw new IllegalArgumentException ( ATTRIBUTES_MUST_NOT_CONTAIN_NULL_VALUES ) ; } final HashSet < String > as = new HashSet < String > ( qs ) ; return new ElementSelector ( ) { @ Override public boolean canBeCompared ( Element controlElement , Element testElement ) { if ( ! byName . canBeCompared ( controlElement , testElement ) ) { return false ; } Map < QName , String > cAttrs = Nodes . getAttributes ( controlElement ) ; Map < String , QName > qNameByLocalName = new HashMap < String , QName > ( ) ; for ( QName q : cAttrs . keySet ( ) ) { String local = q . getLocalPart ( ) ; if ( as . contains ( local ) ) { qNameByLocalName . put ( local , q ) ; } } for ( String a : as ) { QName q = qNameByLocalName . get ( a ) ; if ( q == null ) { qNameByLocalName . put ( a , new QName ( a ) ) ; } } return mapsEqualForKeys ( cAttrs , Nodes . getAttributes ( testElement ) , qNameByLocalName . values ( ) ) ; } } ; |
public class GroupHandlerImpl { /** * Notifying listeners before group deletion .
* @ param group
* the group which is used in delete operation
* @ throws Exception
* if any listener failed to handle the event */
private void preDelete ( Group group ) throws Exception { } } | for ( GroupEventListener listener : listeners ) { listener . preDelete ( group ) ; } |
public class ReactionChain { /** * Added a IReaction for this chain in position .
* @ param reaction The IReaction
* @ param position The position in this chain where the reaction is to be inserted */
public void addReaction ( IReaction reaction , int position ) { } } | hashMapChain . put ( reaction , position ) ; this . addReaction ( reaction ) ; |
public class PtoPOutputHandler { /** * Put a message on this PtoPOutputHandler for delivery to the remote ME .
* This method is called during the preCommitCallback from the
* messageStore
* @ param msg The message to be delivered
* @ param transaction The transaction to be used ( must at least have an autocommit transaction )
* @ param inputHandlerStore The input handler putting this message
* @ param storedByIH true if the message has already been stored in the IH
* @ return true if the message was stored in the IH ( either before or during this call )
* @ throws SIStoreException thrown if there is a problem in the message store */
public boolean put ( SIMPMessage msg , TransactionCommon transaction , InputHandlerStore inputHandlerStore , boolean storedByIH ) throws SIResourceException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "put" , new Object [ ] { msg , transaction , inputHandlerStore , new Boolean ( storedByIH ) } ) ; // Get the JsMessage as we need to update the Guaranteed fields
JsMessage jsMsg = msg . getMessage ( ) ; SIMPUtils . setGuaranteedDeliveryProperties ( jsMsg , messageProcessor . getMessagingEngineUuid ( ) , transmissionItemStream . getLocalizingMEUuid ( ) , null , null , destinationHandler . getUuid ( ) , ProtocolType . UNICASTINPUT , GDConfig . PROTOCOL_VERSION ) ; // Remember if the original user transaction was a real one or not
( ( MessageItem ) msg ) . setTransacted ( ! transaction . isAutoCommit ( ) ) ; try { // Optimitically add the message to the sourceStreamManager before adding
// the item to the itemstream , as this ensures the required GD fields are
// initialised without having to initialise them to dummy values .
boolean addedToStream = sourceStreamManager . addMessage ( msg ) ; // If this message was not added to the stream ( the message is best effort )
// there is no need to add it to the itemStream as there will be no possibility
// for recovery at a later date .
if ( ! addedToStream ) { // If the stream was guessed and we are best effort then discard
// ( Also avoids NPE for PK36530)
if ( ! msg . getStreamIsGuess ( ) ) { // If the destination in a Link add Link specific properties to message
if ( isLink ) { // Prevent any concurrent serialization of the JsMessage that could fail
// due to having one property set but not the other
synchronized ( ( ( MessageItem ) msg ) . getSynchUpdateLock ( ) ) { jsMsg = addLinkProps ( jsMsg ) ; } } // If the message was not transacted we can send it now
if ( ! msg . isTransacted ( ) ) { if ( TraceComponent . isAnyTracingEnabled ( ) && UserTrace . tc_mt . isDebugEnabled ( ) ) UserTrace . traceOutboundSend ( jsMsg , routingMEUuid , destinationHandler . getName ( ) , destinationHandler . isForeignBus ( ) || destinationHandler . isLink ( ) , destinationHandler . isMQLink ( ) , destinationHandler . isTemporary ( ) ) ; // Send message using MPIO
mpio . sendToMe ( routingMEUuid , msg . getPriority ( ) , jsMsg ) ; } // Otherwise we wait for the transaction to commit
else msg . registerMessageEventListener ( MessageEvents . POST_COMMITTED_TRANSACTION , this ) ; } } else { LocalTransaction siTran = null ; if ( ! msg . isTransacted ( ) ) { // Use a local transaction as code is driven from the commit callback
siTran = messageProcessor . getTXManager ( ) . createLocalTransaction ( false ) ; transaction = siTran ; } Transaction msTran = messageProcessor . resolveAndEnlistMsgStoreTransaction ( transaction ) ; // Add the item , but don ' t lock it until we are ready to send it
try { transmissionItemStream . addItem ( ( MessageItem ) msg , msTran ) ; } catch ( OutOfCacheSpace e ) { // No FFDC code needed
// If the add failed , remove the message from the sourceStreamManager
sourceStreamManager . removeMessage ( msg ) ; throw e ; } catch ( MessageStoreException e ) { // FFDC
FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.PtoPOutputHandler.put" , "1:505:1.241" , this ) ; // If the add failed , remove the message from the sourceStreamManager
sourceStreamManager . removeMessage ( msg ) ; throw e ; } registerForEvents ( msg ) ; if ( siTran != null ) { try { siTran . commit ( ) ; } catch ( SIConnectionLostException e ) { // No FFDC code needed
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "put" , "SIResourceException" ) ; throw new SIResourceException ( e ) ; } catch ( SIIncorrectCallException e ) { // No FFDC code needed
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "put" , "SIResourceException" ) ; throw new SIResourceException ( e ) ; } catch ( SIErrorException e ) { // No FFDC code needed
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "put" , "SIResourceException" ) ; throw new SIResourceException ( e ) ; } } } } catch ( OutOfCacheSpace e ) { // No FFDC code needed
SibTr . exception ( tc , e ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "put" , e ) ; throw new SIResourceException ( e ) ; } catch ( MessageStoreException e ) { // MessageStoreException shouldn ' t occur so FFDC .
FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.PtoPOutputHandler.put" , "1:555:1.241" , this ) ; SibTr . exception ( tc , e ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "put" , e ) ; SibTr . error ( tc , "INTERNAL_MESSAGING_ERROR_CWSIP0002" , new Object [ ] { "com.ibm.ws.sib.processor.impl.PtoPOutputHandler" , "1:565:1.241" , e } ) ; throw new SIResourceException ( nls . getFormattedMessage ( "INTERNAL_MESSAGING_ERROR_CWSIP0002" , new Object [ ] { "com.ibm.ws.sib.processor.impl.PtoPOutputHandler" , "1:571:1.241" , e } , null ) , e ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "put" , new Boolean ( storedByIH ) ) ; return storedByIH ; |
public class Resources { /** * Replies the input stream of a resource .
* < p > You may use Unix - like syntax to write the resource path , ie .
* you may use slashes to separate filenames .
* < p > The name of { @ code packagename } is translated into a resource
* path ( by replacing the dots by slashes ) and the given path
* is append to . For example , the two following codes are equivalent : < pre > < code >
* Resources . getResources ( Package . getPackage ( " org . arakhne . afc " ) , " / a / b / c / d . png " ) ;
* Resources . getResources ( " org / arakhne / afc / a / b / c / d . png " ) ;
* < / code > < / pre >
* < p > If the { @ code classLoader } parameter is < code > null < / code > ,
* the class loader replied by { @ link ClassLoaderFinder } is used .
* If this last is < code > null < / code > , the class loader of
* the Resources class is used .
* @ param classLoader is the research scope . If < code > null < / code > ,
* the class loader replied by { @ link ClassLoaderFinder } is used .
* @ param packagename is the package in which the resource should be located .
* @ param path is the relative path of the resource in the package .
* @ return the url of the resource or < code > null < / code > if the resource was
* not found in class paths .
* @ since 6.2 */
@ Pure public static InputStream getResourceAsStream ( ClassLoader classLoader , Package packagename , String path ) { } } | if ( packagename == null || path == null ) { return null ; } final StringBuilder b = new StringBuilder ( ) ; b . append ( packagename . getName ( ) . replaceAll ( Pattern . quote ( "." ) , // $ NON - NLS - 1 $
Matcher . quoteReplacement ( NAME_SEPARATOR ) ) ) ; if ( ! path . startsWith ( NAME_SEPARATOR ) ) { b . append ( NAME_SEPARATOR ) ; } b . append ( path ) ; ClassLoader cl = classLoader ; if ( cl == null ) { cl = packagename . getClass ( ) . getClassLoader ( ) ; } return getResourceAsStream ( cl , b . toString ( ) ) ; |
public class DataSourceService { /** * Declarative Services method for unsetting the connection manager service reference
* @ param ref reference to the service */
protected void unsetConnectionManager ( ServiceReference < ConnectionManagerService > ref ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( this , tc , "unsetConnectionManager" , ref ) ; |
public class EmbeddedXMLConfigValidator { /** * Prints the specified error message .
* @ param key The resource bundle key for the message .
* @ param substitutions The values to be substituted for the tokens in the
* message skeleton . */
private void printErrorMessage ( String key , Object ... substitutions ) { } } | Tr . error ( tc , key , substitutions ) ; errorMsgIssued = true ; |
public class CSVConfig { /** * Set the given configuration option name to the given value . The value ' s format must
* be compatible with the option ' s type ( String , int , or boolean ) . If the given option
* name is unknown , an exception is thrown .
* @ param name Option name ( e . g . , " app " , " batchsize " )
* @ param value Option value ( e . g . , " Email " , " 10000 " , " true " ) . */
public void set ( String name , String value ) { } } | try { Field field = CSVConfig . class . getDeclaredField ( name ) ; if ( field . getType ( ) . toString ( ) . compareToIgnoreCase ( "int" ) == 0 ) { field . set ( this , Integer . parseInt ( value ) ) ; } else if ( field . getType ( ) . toString ( ) . compareToIgnoreCase ( "boolean" ) == 0 ) { if ( value . equalsIgnoreCase ( "t" ) ) value = "true" ; field . set ( this , Boolean . parseBoolean ( value ) ) ; } else { field . set ( this , value ) ; } } catch ( NoSuchFieldException | SecurityException e ) { throw new IllegalArgumentException ( "Unknown parameter: " + name ) ; } catch ( Exception e ) { throw new IllegalArgumentException ( "Invalid value for '" + name + "': " + value ) ; } |
public class ConceptDrawProjectReader { /** * Reads a single day for a calendar .
* @ param mpxjCalendar ProjectCalendar instance
* @ param day ConceptDraw PROJECT week day */
private void readWeekDay ( ProjectCalendar mpxjCalendar , WeekDay day ) { } } | if ( day . isIsDayWorking ( ) ) { ProjectCalendarHours hours = mpxjCalendar . addCalendarHours ( day . getDay ( ) ) ; for ( Document . Calendars . Calendar . WeekDays . WeekDay . TimePeriods . TimePeriod period : day . getTimePeriods ( ) . getTimePeriod ( ) ) { hours . addRange ( new DateRange ( period . getFrom ( ) , period . getTo ( ) ) ) ; } } |
public class MethodPod { /** * pipe publish registration call */
@ Override public < T > void inPipe ( HeadersAmp headers , PipeSub < T > result , StubAmp actor , Object [ ] args ) { } } | result . fail ( new UnsupportedOperationException ( getClass ( ) . getName ( ) ) ) ; |
public class HornSchunckPyramid { /** * SOR iteration for border pixels */
private float iterationSorSafe ( GrayF32 image1 , int x , int y , int pixelIndex ) { } } | float w = SOR_RELAXATION ; float uf ; float vf ; float ui = initFlowX . data [ pixelIndex ] ; float vi = initFlowY . data [ pixelIndex ] ; float u = flowX . data [ pixelIndex ] ; float v = flowY . data [ pixelIndex ] ; float I1 = image1 . data [ pixelIndex ] ; float I2 = warpImage2 . data [ pixelIndex ] ; float I2x = warpDeriv2X . data [ pixelIndex ] ; float I2y = warpDeriv2Y . data [ pixelIndex ] ; float AU = A_safe ( x , y , flowX ) ; float AV = A_safe ( x , y , flowY ) ; flowX . data [ pixelIndex ] = uf = ( 1 - w ) * u + w * ( ( I1 - I2 + I2x * ui - I2y * ( v - vi ) ) * I2x + alpha2 * AU ) / ( I2x * I2x + alpha2 ) ; flowY . data [ pixelIndex ] = vf = ( 1 - w ) * v + w * ( ( I1 - I2 + I2y * vi - I2x * ( uf - ui ) ) * I2y + alpha2 * AV ) / ( I2y * I2y + alpha2 ) ; return ( uf - u ) * ( uf - u ) + ( vf - v ) * ( vf - v ) ; |
public class DescribeVpcEndpointServicesResult { /** * Information about the service .
* @ return Information about the service . */
public java . util . List < ServiceDetail > getServiceDetails ( ) { } } | if ( serviceDetails == null ) { serviceDetails = new com . amazonaws . internal . SdkInternalList < ServiceDetail > ( ) ; } return serviceDetails ; |
public class Tuple1 { /** * Apply this tuple as arguments to a function . */
public final < R > R map ( Function < ? super T1 , ? extends R > function ) { } } | return function . apply ( v1 ) ; |
public class CodeWriter { /** * Returns the best name to identify { @ code className } with in the current context . This uses the
* available imports and the current scope to find the shortest name available . It does not honor
* names visible due to inheritance . */
String lookupName ( ClassName className ) { } } | // If the top level simple name is masked by a current type variable , use the canonical name .
String topLevelSimpleName = className . topLevelClassName ( ) . simpleName ( ) ; if ( currentTypeVariables . contains ( topLevelSimpleName ) ) { return className . canonicalName ; } // Find the shortest suffix of className that resolves to className . This uses both local type
// names ( so ` Entry ` in ` Map ` refers to ` Map . Entry ` ) . Also uses imports .
boolean nameResolved = false ; for ( ClassName c = className ; c != null ; c = c . enclosingClassName ( ) ) { ClassName resolved = resolve ( c . simpleName ( ) ) ; nameResolved = resolved != null ; if ( resolved != null && Objects . equals ( resolved . canonicalName , c . canonicalName ) ) { int suffixOffset = c . simpleNames ( ) . size ( ) - 1 ; return join ( "." , className . simpleNames ( ) . subList ( suffixOffset , className . simpleNames ( ) . size ( ) ) ) ; } } // If the name resolved but wasn ' t a match , we ' re stuck with the fully qualified name .
if ( nameResolved ) { return className . canonicalName ; } // If the class is in the same package , we ' re done .
if ( Objects . equals ( packageName , className . packageName ( ) ) ) { referencedNames . add ( topLevelSimpleName ) ; return join ( "." , className . simpleNames ( ) ) ; } // We ' ll have to use the fully - qualified name . Mark the type as importable for a future pass .
if ( ! javadoc ) { importableType ( className ) ; } return className . canonicalName ; |
public class HtmlUtils { /** * Convert a test result status into an HTML CSS class . */
static String getStatusCssClass ( DeviceTestResult testResult ) { } } | String status ; switch ( testResult . getStatus ( ) ) { case PASS : status = "pass" ; break ; case IGNORED : status = "ignored" ; break ; case FAIL : status = "fail" ; break ; case ASSUMPTION_FAILURE : status = "assumption-violation" ; break ; default : throw new IllegalArgumentException ( "Unknown result status: " + testResult . getStatus ( ) ) ; } return status ; |
public class IndyMath { /** * Choose a method to replace the originally chosen metaMethod to have a
* more efficient call path . */
public static boolean chooseMathMethod ( Selector info , MetaMethod metaMethod ) { } } | Map < MethodType , MethodHandle > xmap = methods . get ( info . name ) ; if ( xmap == null ) return false ; MethodType type = replaceWithMoreSpecificType ( info . args , info . targetType ) ; type = widenOperators ( type ) ; MethodHandle handle = xmap . get ( type ) ; if ( handle == null ) return false ; info . handle = handle ; return true ; |
public class HttpHeaders { /** * @ deprecated Use { @ link # get ( CharSequence ) } instead .
* Returns the value of the { @ code " Host " } header . */
@ Deprecated public static String getHost ( HttpMessage message ) { } } | return message . headers ( ) . get ( HttpHeaderNames . HOST ) ; |
public class StackResourceDrift { /** * Context information that enables AWS CloudFormation to uniquely identify a resource . AWS CloudFormation uses
* context key - value pairs in cases where a resource ' s logical and physical IDs are not enough to uniquely identify
* that resource . Each context key - value pair specifies a unique resource that contains the targeted resource .
* @ param physicalResourceIdContext
* Context information that enables AWS CloudFormation to uniquely identify a resource . AWS CloudFormation
* uses context key - value pairs in cases where a resource ' s logical and physical IDs are not enough to
* uniquely identify that resource . Each context key - value pair specifies a unique resource that contains the
* targeted resource . */
public void setPhysicalResourceIdContext ( java . util . Collection < PhysicalResourceIdContextKeyValuePair > physicalResourceIdContext ) { } } | if ( physicalResourceIdContext == null ) { this . physicalResourceIdContext = null ; return ; } this . physicalResourceIdContext = new com . amazonaws . internal . SdkInternalList < PhysicalResourceIdContextKeyValuePair > ( physicalResourceIdContext ) ; |
public class DecisionCriteria { /** * Returns the best option and the payoff under hurwiczAlpha strategy
* @ param payoffMatrix
* @ param alpha
* @ return */
public static Map . Entry < Object , Object > hurwiczAlpha ( DataTable2D payoffMatrix , double alpha ) { } } | if ( payoffMatrix . isValid ( ) == false ) { throw new IllegalArgumentException ( "The payoff matrix does not have a rectangular format." ) ; } AssociativeArray minPayoffs = new AssociativeArray ( ) ; AssociativeArray maxPayoffs = new AssociativeArray ( ) ; for ( Map . Entry < Object , AssociativeArray > entry : payoffMatrix . entrySet ( ) ) { // Object event = entry . getKey ( ) ;
AssociativeArray optionList = entry . getValue ( ) ; for ( Map . Entry < Object , Object > entry2 : optionList . entrySet ( ) ) { Object option = entry2 . getKey ( ) ; Double payoff = TypeInference . toDouble ( entry2 . getValue ( ) ) ; Double currentMinPayoffOption = minPayoffs . getDouble ( option ) ; if ( currentMinPayoffOption == null || payoff < currentMinPayoffOption ) { minPayoffs . put ( option , payoff ) ; } Double currentMaxPayoffOption = maxPayoffs . getDouble ( option ) ; if ( currentMaxPayoffOption == null || payoff > currentMaxPayoffOption ) { maxPayoffs . put ( option , payoff ) ; } } } AssociativeArray combinedPayoffs = new AssociativeArray ( ) ; for ( Map . Entry < Object , Object > entry : maxPayoffs . entrySet ( ) ) { Object option = entry . getKey ( ) ; combinedPayoffs . put ( option , TypeInference . toDouble ( entry . getValue ( ) ) * alpha + minPayoffs . getDouble ( option ) * ( 1.0 - alpha ) ) ; } Map . Entry < Object , Object > entry = MapMethods . selectMaxKeyValue ( combinedPayoffs ) ; return entry ; |
public class Level { /** * Checks to see if this level or any of its children contain SpecTopics .
* @ return True if the level or the levels children contain at least one SpecTopic . */
public boolean hasSpecTopics ( ) { } } | if ( getSpecTopics ( ) . size ( ) > 0 ) { return true ; } for ( final Level childLevel : levels ) { if ( childLevel . hasSpecTopics ( ) ) { return true ; } } return false ; |
public class MethodParameter { /** * Start this tag ' s lifecycle . Verify that this tag is nested within
* a { @ link AbstractCallMethod } tag and that one of the " null " and " value "
* attributes are set .
* @ return { @ link # SKIP _ BODY }
* @ throws JspException if an error occurs getting the parameter */
public int doStartTag ( ) throws JspException { } } | Tag parent = getParent ( ) ; if ( parent == null || ! ( parent instanceof AbstractCallMethod ) ) { String msg = Bundle . getErrorString ( "Tags_MethodParameter_invalidParent" ) ; registerTagError ( msg , null ) ; reportErrors ( ) ; return SKIP_BODY ; } if ( ! _isNull && _value == null ) { String msg = Bundle . getErrorString ( "Tags_MethodParameter_undefinedValue" ) ; registerTagError ( msg , null ) ; reportErrors ( ) ; return SKIP_BODY ; } return SKIP_BODY ; |
public class UnsafeOperations { /** * Copies the primitive of the specified type from the given field offset in the source object
* to the same location in the copy
* @ param source The object to copy from
* @ param copy The target object
* @ param type The type of primitive at the given offset - e . g . java . lang . Boolean . TYPE
* @ param offset The offset to copy from */
public final void copyPrimitiveAtOffset ( Object source , Object copy , Class < ? > type , long offset ) { } } | if ( java . lang . Boolean . TYPE == type ) { boolean origFieldValue = THE_UNSAFE . getBoolean ( source , offset ) ; THE_UNSAFE . putBoolean ( copy , offset , origFieldValue ) ; } else if ( java . lang . Byte . TYPE == type ) { byte origFieldValue = THE_UNSAFE . getByte ( source , offset ) ; THE_UNSAFE . putByte ( copy , offset , origFieldValue ) ; } else if ( java . lang . Character . TYPE == type ) { char origFieldValue = THE_UNSAFE . getChar ( source , offset ) ; THE_UNSAFE . putChar ( copy , offset , origFieldValue ) ; } else if ( java . lang . Short . TYPE == type ) { short origFieldValue = THE_UNSAFE . getShort ( source , offset ) ; THE_UNSAFE . putShort ( copy , offset , origFieldValue ) ; } else if ( java . lang . Integer . TYPE == type ) { int origFieldValue = THE_UNSAFE . getInt ( source , offset ) ; THE_UNSAFE . putInt ( copy , offset , origFieldValue ) ; } else if ( java . lang . Long . TYPE == type ) { long origFieldValue = THE_UNSAFE . getLong ( source , offset ) ; THE_UNSAFE . putLong ( copy , offset , origFieldValue ) ; } else if ( java . lang . Float . TYPE == type ) { float origFieldValue = THE_UNSAFE . getFloat ( source , offset ) ; THE_UNSAFE . putFloat ( copy , offset , origFieldValue ) ; } else if ( java . lang . Double . TYPE == type ) { double origFieldValue = THE_UNSAFE . getDouble ( source , offset ) ; THE_UNSAFE . putDouble ( copy , offset , origFieldValue ) ; } |
public class ServiceBuilder { /** * Attaches the given StreamSegmentStore creator to this ServiceBuilder . The given Function will not be invoked
* right away ; it will be called when needed .
* @ param streamSegmentStoreCreator The Function to attach .
* @ return This ServiceBuilder . */
public ServiceBuilder withStreamSegmentStore ( Function < ComponentSetup , StreamSegmentStore > streamSegmentStoreCreator ) { } } | Preconditions . checkNotNull ( streamSegmentStoreCreator , "streamSegmentStoreCreator" ) ; this . streamSegmentStoreCreator = streamSegmentStoreCreator ; return this ; |
public class Resources { /** * Obtain the appropriate tagset according to language and postag .
* @ param postag
* the postag
* @ param lang the language
* @ return the mapped tag */
public static String getKafTagSet ( final String postag , final String lang ) { } } | String tag = null ; if ( lang . equalsIgnoreCase ( "de" ) ) { tag = mapGermanTagSetToKaf ( postag ) ; } else if ( lang . equalsIgnoreCase ( "en" ) ) { tag = mapEnglishTagSetToKaf ( postag ) ; } else if ( lang . equalsIgnoreCase ( "es" ) ) { tag = mapSpanishTagSetToKaf ( postag ) ; } else if ( lang . equalsIgnoreCase ( "eu" ) ) { tag = mapUDTagSetToKaf ( postag ) ; } else if ( lang . equalsIgnoreCase ( "gl" ) ) { tag = mapGalicianTagSetToKaf ( postag ) ; } else if ( lang . equalsIgnoreCase ( "fr" ) ) { tag = mapFrenchTagSetToKaf ( postag ) ; } else if ( lang . equalsIgnoreCase ( "it" ) ) { tag = mapUDTagSetToKaf ( postag ) ; } else if ( lang . equalsIgnoreCase ( "nl" ) ) { tag = mapWotanTagSetToKaf ( postag ) ; } else { tag = "O" ; } return tag ; |
public class DBManagerService { /** * Update the corresponding DBService with the given tenant definition . This method is
* called when an existing tenant is modified . If the DBService for the tenant has not
* yet been cached , this method is a no - op . Otherwise , the cached DBService is updated
* with the new tenant definition .
* @ param tenantDef Updated { @ link TenantDefinition } . */
public void updateTenantDef ( TenantDefinition tenantDef ) { } } | synchronized ( m_tenantDBMap ) { DBService dbservice = m_tenantDBMap . get ( tenantDef . getName ( ) ) ; if ( dbservice != null ) { Tenant updatedTenant = new Tenant ( tenantDef ) ; m_logger . info ( "Updating DBService for tenant: {}" , updatedTenant . getName ( ) ) ; dbservice . updateTenant ( updatedTenant ) ; } } |
public class Bean { /** * Add a list of references to a property on this bean .
* A reference identify other beans based on schema and instance id .
* @ param propertyName name of the property as defined by the bean ' s schema .
* @ param refs the reference as defined by the bean ' s schema . */
public void addReference ( final String propertyName , final Collection < BeanId > refs ) { } } | Preconditions . checkNotNull ( refs ) ; Preconditions . checkNotNull ( propertyName ) ; checkCircularReference ( refs . toArray ( new BeanId [ refs . size ( ) ] ) ) ; List < BeanId > list = references . get ( propertyName ) ; if ( list == null ) { list = new ArrayList < > ( ) ; list . addAll ( refs ) ; references . put ( propertyName , list ) ; } else { list . addAll ( refs ) ; } |
public class Maps { /** * Returns the value that is stored for the given key in the given map .
* If there is no value stored , then 0 will be inserted into the map
* and returned
* @ param < K > The key type
* @ param map The map
* @ param k The key
* @ return The value */
public static < K > Integer getCount ( Map < K , Integer > map , K k ) { } } | Integer count = map . get ( k ) ; if ( count == null ) { count = 0 ; map . put ( k , count ) ; } return count ; |
public class TextUtils { /** * Checks whether a text contains a specific fragment , specifying ( offset , len ) pairs
* for limiting the fragments to be checked .
* @ param caseSensitive whether the comparison must be done in a case - sensitive or case - insensitive way .
* @ param text the text to be checked for fragments .
* @ param textOffset the offset of the text .
* @ param textLen the length of the text .
* @ param fragment the fragment to be searched .
* @ param fragmentOffset the offset of the fragment .
* @ param fragmentLen the length of the fragment .
* @ return whether the text contains the fragment or not . */
public static boolean contains ( final boolean caseSensitive , final char [ ] text , final int textOffset , final int textLen , final char [ ] fragment , final int fragmentOffset , final int fragmentLen ) { } } | if ( text == null ) { throw new IllegalArgumentException ( "Text cannot be null" ) ; } if ( fragment == null ) { throw new IllegalArgumentException ( "Fragment cannot be null" ) ; } if ( textLen < fragmentLen ) { return false ; } if ( fragmentLen == 0 ) { return true ; } char c1 , c2 ; for ( int i = 0 , j = 0 ; i < textLen ; i ++ ) { c1 = text [ textOffset + i ] ; c2 = fragment [ fragmentOffset + j ] ; if ( c1 == c2 ) { if ( ++ j == fragmentLen ) { return true ; } continue ; } if ( ! caseSensitive ) { c1 = Character . toUpperCase ( c1 ) ; c2 = Character . toUpperCase ( c2 ) ; if ( c1 == c2 ) { if ( ++ j == fragmentLen ) { return true ; } continue ; } // We check both upper and lower case because that is how String # equalsIgnoreCase ( ) is defined .
// See String # regionMatches ( boolean , int , String , int , int )
if ( Character . toLowerCase ( c1 ) == Character . toLowerCase ( c2 ) ) { if ( ++ j == fragmentLen ) { return true ; } continue ; } } if ( j > 0 ) { // Go back to matching start + 1 , in order to be able to match things like " aab " with fragment " ab "
i -= j ; } j = 0 ; } return false ; |
public class FSEditLog { /** * Check for gaps in the edit log input stream list .
* Note : we ' re assuming that the list is sorted and that txid ranges don ' t
* overlap . This could be done better and with more generality with an
* interval tree . */
private void checkForGaps ( Collection < EditLogInputStream > streams , long fromTxId , long toAtLeastTxId , boolean inProgressOk ) throws IOException { } } | Iterator < EditLogInputStream > iter = streams . iterator ( ) ; long txId = fromTxId ; while ( true ) { if ( txId > toAtLeastTxId ) return ; if ( ! iter . hasNext ( ) ) break ; EditLogInputStream elis = iter . next ( ) ; if ( elis . getFirstTxId ( ) > txId ) { break ; } long next = elis . getLastTxId ( ) ; if ( next == HdfsConstants . INVALID_TXID ) { if ( ! inProgressOk ) { throw new RuntimeException ( "inProgressOk = false, but " + "selectInputStreams returned an in-progress edit " + "log input stream (" + elis + ")" ) ; } // We don ' t know where the in - progress stream ends .
// It could certainly go all the way up to toAtLeastTxId .
return ; } txId = next + 1 ; } throw new IOException ( String . format ( "Gap in transactions. Expected to " + "be able to read up until at least txid %d but unable to find any " + "edit logs containing txid %d" , toAtLeastTxId , txId ) ) ; |
public class AzureStorageClient { /** * Upload a file to the storage account .
* @ param jobFolder the path to the destination folder within storage container .
* @ param file the source file .
* @ return the SAS URI to the uploaded file .
* @ throws IOException */
public URI uploadFile ( final String jobFolder , final File file ) throws IOException { } } | LOG . log ( Level . INFO , "Uploading [{0}] to [{1}]" , new Object [ ] { file , jobFolder } ) ; try { final CloudBlobClient cloudBlobClient = this . cloudBlobClientProvider . getCloudBlobClient ( ) ; final CloudBlobContainer container = cloudBlobClient . getContainerReference ( this . azureStorageContainerName ) ; final String destination = String . format ( "%s/%s" , jobFolder , file . getName ( ) ) ; final CloudBlockBlob blob = container . getBlockBlobReference ( destination ) ; try ( FileInputStream fis = new FileInputStream ( file ) ) { blob . upload ( fis , file . length ( ) ) ; } LOG . log ( Level . FINE , "Uploaded to: {0}" , blob . getStorageUri ( ) . getPrimaryUri ( ) ) ; return this . cloudBlobClientProvider . generateSharedAccessSignature ( blob , getSharedAccessBlobPolicy ( ) ) ; } catch ( final URISyntaxException | StorageException e ) { throw new IOException ( e ) ; } |
public class ShiroHelper { /** * Populate the authenticated user profiles in the Shiro subject .
* @ param profiles the linked hashmap of profiles */
public static void populateSubject ( final LinkedHashMap < String , CommonProfile > profiles ) { } } | if ( profiles != null && profiles . size ( ) > 0 ) { final List < CommonProfile > listProfiles = ProfileHelper . flatIntoAProfileList ( profiles ) ; try { if ( IS_FULLY_AUTHENTICATED_AUTHORIZER . isAuthorized ( null , listProfiles ) ) { SecurityUtils . getSubject ( ) . login ( new Pac4jToken ( listProfiles , false ) ) ; } else if ( IS_REMEMBERED_AUTHORIZER . isAuthorized ( null , listProfiles ) ) { SecurityUtils . getSubject ( ) . login ( new Pac4jToken ( listProfiles , true ) ) ; } } catch ( final HttpAction e ) { throw new TechnicalException ( e ) ; } } |
public class IceComponent { /** * Registers a collection of local candidates to the component .
* @ param candidatesWrapper
* The list of local candidates
* @ see IceComponent # addLocalCandidate ( LocalCandidateWrapper ) */
public void addLocalCandidates ( List < LocalCandidateWrapper > candidatesWrapper ) { } } | for ( LocalCandidateWrapper candidateWrapper : candidatesWrapper ) { addLocalCandidate ( candidateWrapper , false ) ; } sortCandidates ( ) ; |
public class Fn { /** * Creates a < i > function expression < / i > for building a function operating on
* a target object of the specified type .
* Examples :
* < ul >
* < li > < tt > Fn . on ( Types . STRING ) . get ( ) < / tt > returns < tt > Function & lt ; String , String & gt ; < / tt >
* which does nothing . < / li >
* < li > < tt > Fn . on ( Types . STRING ) . exec ( FnString . toInteger ( ) ) . get ( ) < / tt > returns
* < tt > Function & lt ; String , Integer & gt ; < / tt > which receives a String as input ,
* and outputs an Integer as output ( see { @ link Function } ) . < / li >
* < / ul >
* @ param type the type of the object on which the expression should start
* @ return an operator , ready for chaining */
public static < T > Level0GenericUniqOperator < T , T > on ( final Type < T > type ) { } } | return new Level0GenericUniqOperator < T , T > ( ExecutionTarget . forFn ( Normalisation . NONE ) ) ; |
public class SmallIntSet { /** * Either converts the given set to an IntSet if it is one or creates a new IntSet and copies the contents
* @ param set
* @ return */
public static SmallIntSet from ( Set < Integer > set ) { } } | if ( set instanceof SmallIntSet ) { return ( SmallIntSet ) set ; } else { return new SmallIntSet ( set ) ; } |
public class SslTlsUtil { /** * Initialization of trustStoreManager used to provide access to the configured trustStore .
* @ param _ trustStoreFile trust store file
* @ param _ trustStorePassword trust store password
* @ return TrustManager array or null
* @ throws IOException on error */
public static TrustManager [ ] initializeTrustManagers ( File _trustStoreFile , String _trustStorePassword ) throws IOException { } } | if ( _trustStoreFile == null ) { return null ; } String storeType = getStoreTypeByFileName ( _trustStoreFile ) ; boolean derEncoded = storeType == STORETYPE_DER_ENCODED ; if ( derEncoded ) { storeType = STORETYPE_JKS ; } String trustStorePwd = StringUtil . defaultIfBlank ( _trustStorePassword , System . getProperty ( "javax.net.ssl.trustStorePassword" ) ) ; LOGGER . debug ( "Creating trust store of type '" + storeType + "' from " + ( derEncoded ? "DER-encoded" : "" ) + " file '" + _trustStoreFile + "'" ) ; try { TrustManagerFactory trustMgrFactory = TrustManagerFactory . getInstance ( TrustManagerFactory . getDefaultAlgorithm ( ) ) ; KeyStore trustStore = KeyStore . getInstance ( storeType ) ; if ( derEncoded ) { FileInputStream fis = new FileInputStream ( _trustStoreFile ) ; X509Certificate certificate = ( X509Certificate ) CertificateFactory . getInstance ( "X.509" ) . generateCertificate ( fis ) ; trustStore . load ( null , null ) ; trustStore . setCertificateEntry ( "[der_cert_alias]" , certificate ) ; } else { trustStore . load ( new FileInputStream ( _trustStoreFile ) , trustStorePwd != null ? trustStorePwd . toCharArray ( ) : null ) ; } trustMgrFactory . init ( trustStore ) ; return trustMgrFactory . getTrustManagers ( ) ; } catch ( GeneralSecurityException _ex ) { throw new IOException ( "Error while setting up trustStore" , _ex ) ; } |
public class UniversalDateAndTimeStampImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public void setUTCDiffM ( Integer newUTCDiffM ) { } } | Integer oldUTCDiffM = utcDiffM ; utcDiffM = newUTCDiffM ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , AfplibPackage . UNIVERSAL_DATE_AND_TIME_STAMP__UTC_DIFF_M , oldUTCDiffM , utcDiffM ) ) ; |
public class Servlet3SipServletMessageFactory { /** * ( non - Javadoc )
* @ see org . mobicents . servlet . sip . core . MobicentsSipServletMessageFactory # createSipServletRequest ( javax . sip . message . Request ,
* org . mobicents . servlet . sip . core . MobicentsSipFactory , org . mobicents . servlet . sip . core . session . MobicentsSipSession ,
* javax . sip . Transaction , javax . sip . Dialog , boolean ) */
@ Override public MobicentsSipServletRequest createSipServletRequest ( Request request , MobicentsSipSession sipSession , Transaction transaction , Dialog dialog , boolean createDialog ) { } } | return new Servlet3SipServletRequestImpl ( request , sipFactoryImpl , sipSession , transaction , dialog , createDialog ) ; |
public class Coordinate { /** * Creates a coordinate from a Json map with with " ~ table " and " ~ id " . This is the inverse of { @ link # asJson ( ) } . */
public static Coordinate fromJson ( Map < String , ? > json ) { } } | return Coordinate . of ( Intrinsic . getTable ( json ) , Intrinsic . getId ( json ) ) ; |
public class ProbeProxy { /** * Fire a probe event to the registered target .
* @ param probeId the generated probe identifier
* @ param instance the object instance emitting the probe or null
* @ param args the probe payload */
public final static void fireProbe ( long probeId , Object instance , Object target , Object args ) { } } | // Load statics onto the stack to avoid a window where they can be cleared
// between the test for null and the invocation of the method
Object proxyTarget = fireProbeTarget ; Method method = fireProbeMethod ; if ( proxyTarget == null || method == null ) { return ; } try { method . invoke ( proxyTarget , probeId , instance , target , args ) ; } catch ( Throwable t ) { t . printStackTrace ( ) ; } |
public class DBClusterSnapshotAttributesResult { /** * The list of attributes and values for the manual DB cluster snapshot .
* @ return The list of attributes and values for the manual DB cluster snapshot . */
public java . util . List < DBClusterSnapshotAttribute > getDBClusterSnapshotAttributes ( ) { } } | if ( dBClusterSnapshotAttributes == null ) { dBClusterSnapshotAttributes = new com . amazonaws . internal . SdkInternalList < DBClusterSnapshotAttribute > ( ) ; } return dBClusterSnapshotAttributes ; |
public class CheckMissingGetCssName { /** * Returns whether the node is the right hand side of an assignment or
* initialization of a variable named * _ ID of * _ ID _ . */
private boolean insideAssignmentToIdConstant ( Node n ) { } } | Node parent = n . getParent ( ) ; if ( parent . isAssign ( ) ) { String qname = parent . getFirstChild ( ) . getQualifiedName ( ) ; return qname != null && isIdName ( qname ) ; } else if ( parent . isName ( ) ) { Node grandParent = parent . getParent ( ) ; if ( grandParent != null && NodeUtil . isNameDeclaration ( grandParent ) ) { String name = parent . getString ( ) ; return isIdName ( name ) ; } else { return false ; } } else { return false ; } |
public class DatagramUtil { /** * 将一个int类型转换为四个字节的byte数组
* @ param data int类型的参数
* @ return byte类型的数组 */
public static byte [ ] convert ( int data ) { } } | long len = Integer . toUnsignedLong ( data ) ; byte [ ] b = new byte [ Datagram . LEN_LIMIT ] ; for ( int i = 0 ; i < b . length ; i ++ ) { b [ i ] = ( byte ) ( len >> ( ( b . length - i - 1 ) * 8 ) ) ; } return b ; |
public class ConfigurationAdminImpl { /** * ( non - Javadoc )
* @ see
* org . osgi . service . cm . ConfigurationAdmin # getConfiguration ( java . lang . String ,
* java . lang . String )
* If Configuration already exists ( exists in table , or serialized ) , return the
* existing configuration .
* If existing configuration ' s location is null , set it with specified
* location before returning it .
* If Configuration doesn ' t already exist , create a new Configuration objects
* with null properties
* and bound to the specified location including null location .
* SecurityException is thrown if caller doesn ' t have proper
* ConfigurationPermission .
* @ param pid
* @ param location */
@ Override public ExtendedConfiguration getConfiguration ( String pid , String location ) throws IOException { } } | this . caFactory . checkConfigurationPermission ( ) ; return caFactory . getConfigurationStore ( ) . getConfiguration ( pid , location ) ; |
public class Content { /** * Get object property . This helper method is the work horse of all content getters . An object property is not limited
* to object field ; it also includes array and list items , content instance getters and super - classes , as follow :
* < ul >
* < li > if object is instance of array or list property name should be a numeric value used as index ,
* < li > try to get given object instance field with requested property name and return its value ,
* < li > if no such field consider this content instance and try a getter with the property name ,
* < li > if no such method delegates { @ link # getContentObject ( Object , String ) } ,
* < li > if field is still null throws content exception .
* < / ul >
* @ param object instance to retrieve property from ,
* @ param property property name .
* @ return requested object property or null .
* @ throws IllegalArgumentException if any of object or property name arguments is null .
* @ throws TemplateException if property not found . */
private Object getObjectProperty ( Object object , String property ) throws IllegalArgumentException , TemplateException { } } | Params . notNull ( object , "Object" ) ; Params . notNull ( property , "Property" ) ; if ( object . getClass ( ) . isArray ( ) ) { try { int index = Integer . parseInt ( property ) ; return Array . get ( object , index ) ; } catch ( NumberFormatException unused ) { throw new TemplateException ( "Invalid property on |%s|. Expect numeric used as index but got |%s|." , object . getClass ( ) , property ) ; } } if ( object instanceof List < ? > ) { try { List < ? > list = ( List < ? > ) object ; int index = Integer . parseInt ( property ) ; return list . get ( index ) ; } catch ( NumberFormatException unused ) { throw new TemplateException ( "Invalid property on |%s|. Expect numeric used as index but got |%s|." , object . getClass ( ) , property ) ; } } // try to load field value from object hierarchy and if not found make a second attempt using content getter
// if both fails throw exception
// next logic uses exception for normal flow control but i do not see reasonable alternative
try { return Classes . getFieldEx ( object . getClass ( ) , Strings . toMemberName ( property ) ) . get ( object ) ; } catch ( NoSuchFieldException expectedMissingField ) { return getContentObject ( object , property ) ; } catch ( Exception unexpected ) { throw new BugError ( unexpected ) ; } |
public class TotalOrderPartitioner { /** * matching key types enforced by passing in */
@ SuppressWarnings ( "unchecked" ) // map output key class
private K [ ] readPartitions ( FileSystem fs , Path p , Class < K > keyClass , JobConf job ) throws IOException { } } | SequenceFile . Reader reader = new SequenceFile . Reader ( fs , p , job ) ; ArrayList < K > parts = new ArrayList < K > ( ) ; K key = ( K ) ReflectionUtils . newInstance ( keyClass , job ) ; NullWritable value = NullWritable . get ( ) ; while ( reader . next ( key , value ) ) { parts . add ( key ) ; key = ( K ) ReflectionUtils . newInstance ( keyClass , job ) ; } reader . close ( ) ; return parts . toArray ( ( K [ ] ) Array . newInstance ( keyClass , parts . size ( ) ) ) ; |
public class MsgpackIOUtil { /** * Parses the { @ code messages } from the stream using the given { @ code schema } . */
public static < T > List < T > parseListFrom ( MessageBufferInput in , Schema < T > schema , boolean numeric ) throws IOException { } } | MessageUnpacker unpacker = MessagePack . newDefaultUnpacker ( in ) ; try { return parseListFrom ( unpacker , schema , numeric ) ; } finally { unpacker . close ( ) ; } |
public class AWSLogsClient { /** * Creates a log group with the specified name .
* You can create up to 5000 log groups per account .
* You must use the following guidelines when naming a log group :
* < ul >
* < li >
* Log group names must be unique within a region for an AWS account .
* < / li >
* < li >
* Log group names can be between 1 and 512 characters long .
* < / li >
* < li >
* Log group names consist of the following characters : a - z , A - Z , 0-9 , ' _ ' ( underscore ) , ' - ' ( hyphen ) , ' / ' ( forward
* slash ) , and ' . ' ( period ) .
* < / li >
* < / ul >
* If you associate a AWS Key Management Service ( AWS KMS ) customer master key ( CMK ) with the log group , ingested
* data is encrypted using the CMK . This association is stored as long as the data encrypted with the CMK is still
* within Amazon CloudWatch Logs . This enables Amazon CloudWatch Logs to decrypt this data whenever it is requested .
* If you attempt to associate a CMK with the log group but the CMK does not exist or the CMK is disabled , you will
* receive an < code > InvalidParameterException < / code > error .
* @ param createLogGroupRequest
* @ return Result of the CreateLogGroup operation returned by the service .
* @ throws InvalidParameterException
* A parameter is specified incorrectly .
* @ throws ResourceAlreadyExistsException
* The specified resource already exists .
* @ throws LimitExceededException
* You have reached the maximum number of resources that can be created .
* @ throws OperationAbortedException
* Multiple requests to update the same resource were in conflict .
* @ throws ServiceUnavailableException
* The service cannot complete the request .
* @ sample AWSLogs . CreateLogGroup
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / logs - 2014-03-28 / CreateLogGroup " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public CreateLogGroupResult createLogGroup ( CreateLogGroupRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeCreateLogGroup ( request ) ; |
public class Challenge { /** * Create a ChallengeCreator to execute create .
* @ param pathServiceSid Service Sid .
* @ param pathIdentity Unique identity of the Entity
* @ param pathFactorSid Factor Sid .
* @ return ChallengeCreator capable of executing the create */
public static ChallengeCreator creator ( final String pathServiceSid , final String pathIdentity , final String pathFactorSid ) { } } | return new ChallengeCreator ( pathServiceSid , pathIdentity , pathFactorSid ) ; |
public class SeleniumDriverSetup { /** * Connects SeleniumHelper to a remote web driver , without specifying browser version .
* @ param browser name of browser to connect to .
* @ param url url to connect to browser .
* @ return true .
* @ throws MalformedURLException if supplied url can not be transformed to URL . */
public boolean connectToDriverForAt ( String browser , String url ) throws MalformedURLException { } } | return connectToDriverForVersionOnAt ( browser , "" , Platform . ANY . name ( ) , url ) ; |
public class Expressions { /** * Creates an IsLessThan expression from the given expression and constant .
* @ param left The left expression .
* @ param constant The constant to compare to ( must be a Number ) .
* @ throws IllegalArgumentException If the constant is not a Number
* @ return A new is less than binary expression . */
public static IsLessThan isLessThan ( ComparableExpression < Number > left , Object constant ) { } } | if ( ! ( constant instanceof Number ) ) throw new IllegalArgumentException ( "constant is not a Number" ) ; return new IsLessThan ( left , constant ( ( Number ) constant ) ) ; |
public class ManagedClustersInner { /** * Gets cluster user credential of a managed cluster .
* Gets cluster user credential of the managed cluster with a specified resource group and name .
* @ param resourceGroupName The name of the resource group .
* @ param resourceName The name of the managed cluster resource .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < CredentialResultsInner > listClusterUserCredentialsAsync ( String resourceGroupName , String resourceName , final ServiceCallback < CredentialResultsInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( listClusterUserCredentialsWithServiceResponseAsync ( resourceGroupName , resourceName ) , serviceCallback ) ; |
public class Utils { /** * Convert timestamp in a string format to joda time
* @ param input timestamp
* @ param format timestamp format
* @ param timezone time zone of timestamp
* @ return joda time */
public static DateTime toDateTime ( String input , String format , String timezone ) { } } | String tz = StringUtils . defaultString ( timezone , ConfigurationKeys . DEFAULT_SOURCE_TIMEZONE ) ; DateTimeZone dateTimeZone = getTimeZone ( tz ) ; DateTimeFormatter inputDtFormat = DateTimeFormat . forPattern ( format ) . withZone ( dateTimeZone ) ; DateTime outputDateTime = inputDtFormat . parseDateTime ( input ) . withZone ( dateTimeZone ) ; return outputDateTime ; |
public class ValidatorContext { /** * 获取闭包
* @ param key 闭包名称
* @ return 闭包 */
public Closure getClosure ( String key ) { } } | if ( closures != null && ! closures . isEmpty ( ) ) { return closures . get ( key ) ; } return null ; |
public class FgExampleCache { /** * Gets the i ' th example . */
public LFgExample get ( int i ) { } } | LFgExample ex ; synchronized ( cache ) { ex = cache . get ( i ) ; } if ( ex == null ) { ex = exampleFactory . get ( i ) ; synchronized ( cache ) { cache . put ( i , ex ) ; } } return ex ; |
public class BucketManager { /** * 获取该空间下所有的domain
* @ param bucket
* @ return 该空间名下的domain
* @ throws QiniuException */
public String [ ] domainList ( String bucket ) throws QiniuException { } } | String url = String . format ( "%s/v6/domain/list?tbl=%s" , configuration . apiHost ( ) , bucket ) ; Response res = get ( url ) ; if ( ! res . isOK ( ) ) { throw new QiniuException ( res ) ; } String [ ] domains = res . jsonToObject ( String [ ] . class ) ; res . close ( ) ; return domains ; |
public class JsonService { /** * Get a child JSON array from a parent JSON object .
* @ param jsonObject The parent JSON object .
* @ param key The name of the child object .
* @ return Returns the child JSON array if it could be found , or null if the value was null .
* @ throws JSONException In case something went wrong while searching for the child . */
public static JSONArray getChildArray ( JSONObject jsonObject , String key ) throws JSONException { } } | checkArguments ( jsonObject , key ) ; JSONValue value = jsonObject . get ( key ) ; if ( value != null ) { if ( value . isArray ( ) != null ) { return value . isArray ( ) ; } else if ( value . isNull ( ) != null ) { return null ; } throw new JSONException ( "Child is not a JSONArray, but a: " + value . getClass ( ) ) ; } return null ; |
/**
 * Creates a new {@link JAXBElement}{@code <}{@link WaterBodyType}{@code >}
 * wrapping the given value as a CityGML 1.0 "WaterBody" XML element.
 *
 * @param value Java instance representing the XML element's value.
 * @return the new instance of {@link JAXBElement}{@code <}{@link WaterBodyType}{@code >}
 */
@XmlElementDecl(namespace = "http://www.opengis.net/citygml/waterbody/1.0", name = "WaterBody", substitutionHeadNamespace = "http://www.opengis.net/citygml/waterbody/1.0", substitutionHeadName = "_WaterObject")
public JAXBElement<WaterBodyType> createWaterBody(WaterBodyType value) {
    return new JAXBElement<WaterBodyType>(_WaterBody_QNAME, WaterBodyType.class, null, value);
}
public class View { /** * Enables the automatic layout for this view , with the specified settings .
* @ param rankDirection the rank direction
* @ param rankSeparation the separation between ranks ( in pixels , a positive integer )
* @ param nodeSeparation the separation between nodes within the same rank ( in pixels , a positive integer )
* @ param edgeSeparation the separation between edges ( in pixels , a positive integer )
* @ param vertices whether vertices should be created during automatic layout */
public void setAutomaticLayout ( AutomaticLayout . RankDirection rankDirection , int rankSeparation , int nodeSeparation , int edgeSeparation , boolean vertices ) { } } | this . automaticLayout = new AutomaticLayout ( rankDirection , rankSeparation , nodeSeparation , edgeSeparation , vertices ) ; |
public class AbstractParamInjectionBinding { /** * real create the paramter object based CXF implementation
* @ param classType
* @ param genericType
* @ param memberAnnotations
* @ param paramInjectionMetadata
* @ return */
protected Object getInjectedObjectFromCXF ( Class < ? > classType , Type genericType , Annotation [ ] memberAnnotations , ParamInjectionMetadata paramInjectionMetadata ) { } } | Parameter p = ResourceUtils . getParameter ( 0 , memberAnnotations , classType ) ; Object injectedObject = null ; Message message = paramInjectionMetadata . getInMessage ( ) ; OperationResourceInfo ori = paramInjectionMetadata . getOperationResourceInfo ( ) ; BeanResourceInfo cri = ori . getClassResourceInfo ( ) ; MultivaluedMap < String , String > values = ( MultivaluedMap < String , String > ) message . get ( URITemplate . TEMPLATE_PARAMETERS ) ; if ( p . getType ( ) == ParameterType . BEAN && cri instanceof ClassResourceInfo ) { injectedObject = JAXRSUtils . createBeanParamValue ( message , classType , ori ) ; } else { injectedObject = JAXRSUtils . createHttpParameterValue ( p , classType , genericType , memberAnnotations , message , values , ori ) ; } return injectedObject ; |
public class ConstantPool { /** * Adds a new constant . */
public void addConstant ( ConstantPoolEntry entry ) { } } | if ( entry instanceof Utf8Constant ) { Utf8Constant utf8 = ( Utf8Constant ) entry ; _utf8Map . put ( utf8 . getValue ( ) , utf8 ) ; } _entries . add ( entry ) ; |
public class DeleteRetentionPolicyRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DeleteRetentionPolicyRequest deleteRetentionPolicyRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( deleteRetentionPolicyRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteRetentionPolicyRequest . getLogGroupName ( ) , LOGGROUPNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class CmsListResourceIconAction { /** * Generates a default html code where several buttons can have the same help text . < p >
* the only diff to < code > { @ link org . opencms . workplace . tools . A _ CmsHtmlIconButton # defaultButtonHtml ( org . opencms . workplace . tools . CmsHtmlIconButtonStyleEnum , String , String , String , boolean , String , String , String ) } < / code >
* is that the icons are 16x16 . < p >
* @ param cms the cms context , can be null
* @ param id the id
* @ param helpId the id of the helptext div tag
* @ param name the name , if empty only the icon is displayed
* @ param helpText the help text , if empty no mouse events are generated
* @ param enabled if enabled or not , if not set be sure to take an according helptext
* @ param iconPath the path to the icon , if empty only the name is displayed
* @ param onClick the js code to execute , if empty no link is generated
* @ param confirmationMessage the confirmation message
* @ param singleHelp if set , no helptext is written , you have to use the defaultHelpHtml ( ) method later
* @ return html code
* @ see org . opencms . workplace . tools . A _ CmsHtmlIconButton # defaultButtonHtml ( org . opencms . workplace . tools . CmsHtmlIconButtonStyleEnum , String , String , String , boolean , String , String , String ) */
protected String defButtonHtml ( CmsObject cms , String id , String helpId , String name , String helpText , boolean enabled , String iconPath , String confirmationMessage , String onClick , boolean singleHelp ) { } } | StringBuffer html = new StringBuffer ( 1024 ) ; html . append ( "\t<span class=\"link" ) ; if ( enabled ) { html . append ( "\"" ) ; } else { html . append ( " linkdisabled\"" ) ; } if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( helpText ) ) { if ( ! singleHelp ) { html . append ( " onMouseOver=\"sMH('" ) ; html . append ( id ) ; html . append ( "');\" onMouseOut=\"hMH('" ) ; html . append ( id ) ; html . append ( "');\"" ) ; } else { html . append ( " onMouseOver=\"sMHS('" ) ; html . append ( id ) ; html . append ( "', '" ) ; html . append ( helpId ) ; html . append ( "');\" onMouseOut=\"hMH('" ) ; html . append ( id ) ; html . append ( "', '" ) ; html . append ( helpId ) ; html . append ( "');\"" ) ; } } if ( enabled && CmsStringUtil . isNotEmptyOrWhitespaceOnly ( onClick ) ) { html . append ( " onClick=\"" ) ; if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( confirmationMessage ) ) { html . append ( "if (confirm('" + CmsStringUtil . escapeJavaScript ( confirmationMessage ) + "')) {" ) ; } html . append ( onClick ) ; if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( confirmationMessage ) ) { html . append ( " }" ) ; } html . append ( "\"" ) ; } html . append ( " title='" ) ; html . append ( name ) ; html . append ( "'" ) ; html . append ( " style='display: block; width: 20px; height: 20px;'>" ) ; if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( iconPath ) ) { html . append ( "<img src='" ) ; html . append ( CmsWorkplace . getSkinUri ( ) ) ; if ( ! enabled ) { StringBuffer icon = new StringBuffer ( 128 ) ; icon . append ( iconPath . substring ( 0 , iconPath . lastIndexOf ( '.' ) ) ) ; icon . append ( "_disabled" ) ; icon . append ( iconPath . substring ( iconPath . lastIndexOf ( '.' ) ) ) ; if ( cms != null ) { if ( cms . 
existsResource ( CmsWorkplace . VFS_PATH_RESOURCES + icon . toString ( ) ) ) { html . append ( icon ) ; } else { html . append ( iconPath ) ; } } else { html . append ( iconPath ) ; } } else { html . append ( iconPath ) ; } html . append ( "'" ) ; if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( name ) ) { html . append ( " alt='" ) ; html . append ( name ) ; html . append ( "'" ) ; html . append ( " title='" ) ; html . append ( name ) ; html . append ( "'" ) ; } html . append ( "style='width: 16px; height: 16px;' >" ) ; } html . append ( "</span>\n" ) ; if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( helpText ) && ! singleHelp ) { html . append ( "<div class='help' id='help" ) ; html . append ( helpId ) ; html . append ( "' onMouseOver=\"sMH('" ) ; html . append ( id ) ; html . append ( "');\" onMouseOut=\"hMH('" ) ; html . append ( id ) ; html . append ( "');\">" ) ; html . append ( helpText ) ; html . append ( "</div>\n" ) ; } return html . toString ( ) ; |
public class X509CertImpl { /** * Checks that the specified date is within the certificate ' s
* validity period , or basically if the certificate would be
* valid at the specified date / time .
* @ param date the Date to check against to see if this certificate
* is valid at that date / time .
* @ exception CertificateExpiredException if the certificate has expired
* with respect to the < code > date < / code > supplied .
* @ exception CertificateNotYetValidException if the certificate is not
* yet valid with respect to the < code > date < / code > supplied . */
public void checkValidity ( Date date ) throws CertificateExpiredException , CertificateNotYetValidException { } } | CertificateValidity interval = null ; try { interval = ( CertificateValidity ) info . get ( CertificateValidity . NAME ) ; } catch ( Exception e ) { throw new CertificateNotYetValidException ( "Incorrect validity period" ) ; } if ( interval == null ) throw new CertificateNotYetValidException ( "Null validity period" ) ; interval . valid ( date ) ; |
/**
 * Sets a {@link Function} to execute if this matches, delegating to the
 * two-argument overload with the stored fluent matcher.
 *
 * @param function function producing the result for a successful match
 * @return the fluent matcher for further chaining
 */
public FluentMatchingR<T, R> get(Function<A, R> function) {
    return get(fluentMatchingR, function);
}
public class NGAExtensions { /** * Delete the Feature Style extensions for the table
* @ param geoPackage
* GeoPackage
* @ param table
* table name
* @ since 3.2.0 */
public static void deleteFeatureStyle ( GeoPackageCore geoPackage , String table ) { } } | FeatureCoreStyleExtension featureStyleExtension = getFeatureStyleExtension ( geoPackage ) ; if ( featureStyleExtension . has ( table ) ) { featureStyleExtension . deleteRelationships ( table ) ; } |
public class SystemKeyspace { /** * This method is used to remove information about truncation time for specified column family */
public static synchronized void removeTruncationRecord ( UUID cfId ) { } } | String req = "DELETE truncated_at[?] from system.%s WHERE key = '%s'" ; executeInternal ( String . format ( req , LOCAL_CF , LOCAL_KEY ) , cfId ) ; truncationRecords = null ; forceBlockingFlush ( LOCAL_CF ) ; |
public class ServerExampleJar { /** * Register samples . */
private void registerSamples ( ) { } } | SamplePanelRegistry . registerFactory ( CATEGORY_WFS , new ShowcaseSampleDefinition ( ) { public SamplePanel create ( ) { return new WfsCapabilitiesPanel ( ) ; } public String getTitle ( ) { return MESSAGES . capabilitiesTitle ( ) ; } public String getShortDescription ( ) { return MESSAGES . capabilitiesShort ( ) ; } public String getDescription ( ) { return MESSAGES . capabilitiesDescription ( ) ; } public String getCategory ( ) { return CATEGORY_WFS ; } @ Override public String getKey ( ) { return "wfscapabilities" ; } } ) ; |
public class ResourceMetadata { /** * Merges the metrics and attributes from the given instance , to the current instance .
* @ param other a { @ link ResourceMetadata } object
* @ return true if the meta - data was modified as a result of the merge , false otherwise */
public boolean merge ( ResourceMetadata other ) { } } | boolean modified = m_metrics . addAll ( other . m_metrics ) ; if ( ! modified ) { modified = ! m_attributes . equals ( other . m_attributes ) ; } m_attributes . putAll ( other . m_attributes ) ; return modified ; |
public class GeoJsonReaderDriver { /** * Parses the geometries to return its properties
* @ param jp
* @ throws IOException
* @ throws SQLException */
private void parseParentGeometryMetadata ( JsonParser jp ) throws IOException , SQLException { } } | if ( jp . nextToken ( ) != JsonToken . VALUE_NULL ) { // START _ OBJECT { in case of null geometry
jp . nextToken ( ) ; // FIELD _ NAME type
jp . nextToken ( ) ; // VALUE _ STRING Point
String geometryType = jp . getText ( ) ; parseGeometryMetadata ( jp , geometryType ) ; } |
public class ResourceAddressFactorySpi { /** * These options are removed and set in { @ link # parseNamedOptions ( java . net . URI , ResourceOptions , java . util . Map }
* above , so we need to include them in the new options by name map used for alternate construction . */
@ SuppressWarnings ( "JavadocReference" ) protected Map < String , Object > getNewOptionsByName ( ResourceOptions options , Map < String , Object > optionsByName ) { } } | Map < String , Object > clonedOptionsByName = new HashMap < > ( optionsByName ) ; clonedOptionsByName . put ( NEXT_PROTOCOL . name ( ) , options . getOption ( NEXT_PROTOCOL ) ) ; clonedOptionsByName . put ( QUALIFIER . name ( ) , options . getOption ( QUALIFIER ) ) ; clonedOptionsByName . put ( TRANSPORT_URI . name ( ) , options . getOption ( TRANSPORT_URI ) ) ; clonedOptionsByName . put ( BIND_ALTERNATE . name ( ) , options . getOption ( BIND_ALTERNATE ) ) ; clonedOptionsByName . put ( RESOLVER . name ( ) , options . getOption ( RESOLVER ) ) ; return clonedOptionsByName ; |
public class ExistsRule { /** * Create an instance of ExistsRule using the
* top name on the stack .
* @ param stack stack
* @ return instance of ExistsRule . */
public static Rule getRule ( final Stack stack ) { } } | if ( stack . size ( ) < 1 ) { throw new IllegalArgumentException ( "Invalid EXISTS rule - expected one parameter but received " + stack . size ( ) ) ; } return new ExistsRule ( stack . pop ( ) . toString ( ) ) ; |
public class SRTServletRequest { /** * PQ94384 */
public void addParameter ( String name , String [ ] values ) { } } | if ( WCCustomProperties . CHECK_REQUEST_OBJECT_IN_USE ) { checkRequestObjectInUse ( ) ; } Hashtable aParam = new Hashtable ( 3 ) ; aParam . put ( name , values ) ; mergeQueryParams ( aParam ) ; |
/**
 * Merges clusters {@code c1} and {@code c2} into a freshly allocated cluster
 * id, folding their co-occurrence probabilities and pairwise weights into the
 * new cluster and removing all bookkeeping for the two old clusters.
 *
 * @param c1 first cluster id to merge
 * @param c2 second cluster id to merge
 */
protected void merge(int c1, int c2) {
    int newid = lastid++;
    // redirect both old clusters to the new id
    heads.put(c1, newid);
    heads.put(c2, newid);
    TIntFloatHashMap newpcc = new TIntFloatHashMap();
    TIntFloatHashMap inewpcc = new TIntFloatHashMap();
    TIntFloatHashMap newwcc = new TIntFloatHashMap();
    float pc1 = wordProb.get(c1);
    float pc2 = wordProb.get(c2);
    // probability of the new (merged) cluster
    float pc = pc1 + pc2;
    float w;
    {
        // self co-occurrence of the merged cluster: sum of all four pairings
        float pcc1 = getProb(c1, c1);
        float pcc2 = getProb(c2, c2);
        float pcc3 = getProb(c1, c2);
        float pcc4 = getProb(c2, c1);
        float pcc = pcc1 + pcc2 + pcc3 + pcc4;
        if (pcc != 0.0f) newpcc.put(newid, pcc);
        w = clacW(pcc, pc, pc);
        if (w != 0.0f) newwcc.put(newid, w);
    }
    // fold co-occurrence with every other live cluster k into the new cluster
    TIntIterator it = slots.iterator();
    while (it.hasNext()) {
        int k = it.next();
        float pck = wordProb.get(k);
        if (c1 == k || c2 == k) {
            continue;
        } else {
            float pcc1 = getProb(c1, k);
            float pcc2 = getProb(c2, k);
            float pcc12 = pcc1 + pcc2;
            // NOTE(review): this put uses key 'newid' inside the per-k loop,
            // so each iteration overwrites the previous entry; the symmetric
            // map below uses key 'k' (inewpcc.put(k, ...)) — confirm whether
            // 'k' was intended here as well.
            if (pcc12 != 0.0f) newpcc.put(newid, pcc12);
            float p1 = clacW(pcc12, pc, pck);
            float pcc3 = getProb(k, c1);
            float pcc4 = getProb(k, c2);
            float pcc34 = pcc3 + pcc4;
            if (pcc34 != 0.0f) inewpcc.put(k, pcc34);
            float p2 = clacW(pcc34, pck, pc);
            w = p1 + p2;
            // NOTE(review): same overwrite pattern as above — key is 'newid'
            // per iteration; verify against the intended clustering update.
            if (w != 0.0f) newwcc.put(newid, w);
        }
    }
    // update slots: retire the old ids, register the merged one
    slots.remove(c1);
    slots.remove(c2);
    slots.add(newid);
    pcc.put(newid, newpcc);
    pcc.remove(c1);
    pcc.remove(c2);
    TIntFloatIterator it2 = inewpcc.iterator();
    while (it2.hasNext()) {
        it2.advance();
        TIntFloatHashMap pmap = pcc.get(it2.key());
        // if (pmap == null) {
        // pmap = new TIntFloatHashMap();
        // pcc.put(it2.key(), pmap);
        pmap.put(newid, it2.value());
        pmap.remove(c1);
        pmap.remove(c2);
    }
    // newid is always greater than it3.key
    wcc.put(newid, new TIntFloatHashMap());
    wcc.remove(c1);
    wcc.remove(c2);
    TIntFloatIterator it3 = newwcc.iterator();
    while (it3.hasNext()) {
        it3.advance();
        TIntFloatHashMap pmap = wcc.get(it3.key());
        pmap.put(newid, it3.value());
        pmap.remove(c1);
        pmap.remove(c2);
    }
    wordProb.remove(c1);
    wordProb.remove(c2);
    wordProb.put(newid, pc);
    // record the merged cluster (tree node over the two children)
    Cluster cluster = new Cluster(newid, clusters.get(c1), clusters.get(c2), pc);
    clusters.put(newid, cluster);
    System.out.println("合并:" + cluster.rep);
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.