signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class PollerBase { /** * Cancel the timer created by sink _ object with ID equal to id _ . */ public void cancelTimer ( IPollEvents sink , int id ) { } }
assert ( Thread . currentThread ( ) == worker ) ; TimerInfo copy = new TimerInfo ( sink , id ) ; // Complexity of this operation is O ( n ) . We assume it is rarely used . TimerInfo timerInfo = timers . find ( copy ) ; if ( timerInfo != null ) { // let ' s defer the removal during the loop timerInfo . cancelled = true ; }
public class MCARGImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public boolean eIsSet ( int featureID ) { } }
switch ( featureID ) { case AfplibPackage . MCARG__RG_LENGTH : return RG_LENGTH_EDEFAULT == null ? rgLength != null : ! RG_LENGTH_EDEFAULT . equals ( rgLength ) ; case AfplibPackage . MCARG__TRIPLETS : return triplets != null && ! triplets . isEmpty ( ) ; } return super . eIsSet ( featureID ) ;
public class InternalXtextParser { /** * InternalXtext . g : 3474:1 : ruleCharacterRange returns [ EObject current = null ] : ( this _ Keyword _ 0 = ruleKeyword ( ( ) otherlv _ 2 = ' . . ' ( ( lv _ right _ 3_0 = ruleKeyword ) ) ) ? ) ; */ public final EObject ruleCharacterRange ( ) throws RecognitionException { } }
EObject current = null ; Token otherlv_2 = null ; EObject this_Keyword_0 = null ; EObject lv_right_3_0 = null ; enterRule ( ) ; try { // InternalXtext . g : 3480:2 : ( ( this _ Keyword _ 0 = ruleKeyword ( ( ) otherlv _ 2 = ' . . ' ( ( lv _ right _ 3_0 = ruleKeyword ) ) ) ? ) ) // InternalXtext . g : 3481:2 : ( this _ Keyword _ 0 = ruleKeyword ( ( ) otherlv _ 2 = ' . . ' ( ( lv _ right _ 3_0 = ruleKeyword ) ) ) ? ) { // InternalXtext . g : 3481:2 : ( this _ Keyword _ 0 = ruleKeyword ( ( ) otherlv _ 2 = ' . . ' ( ( lv _ right _ 3_0 = ruleKeyword ) ) ) ? ) // InternalXtext . g : 3482:3 : this _ Keyword _ 0 = ruleKeyword ( ( ) otherlv _ 2 = ' . . ' ( ( lv _ right _ 3_0 = ruleKeyword ) ) ) ? { newCompositeNode ( grammarAccess . getCharacterRangeAccess ( ) . getKeywordParserRuleCall_0 ( ) ) ; pushFollow ( FollowSets000 . FOLLOW_49 ) ; this_Keyword_0 = ruleKeyword ( ) ; state . _fsp -- ; current = this_Keyword_0 ; afterParserOrEnumRuleCall ( ) ; // InternalXtext . g : 3490:3 : ( ( ) otherlv _ 2 = ' . . ' ( ( lv _ right _ 3_0 = ruleKeyword ) ) ) ? int alt73 = 2 ; int LA73_0 = input . LA ( 1 ) ; if ( ( LA73_0 == 49 ) ) { alt73 = 1 ; } switch ( alt73 ) { case 1 : // InternalXtext . g : 3491:4 : ( ) otherlv _ 2 = ' . . ' ( ( lv _ right _ 3_0 = ruleKeyword ) ) { // InternalXtext . g : 3491:4 : ( ) // InternalXtext . g : 3492:5: { current = forceCreateModelElementAndSet ( grammarAccess . getCharacterRangeAccess ( ) . getCharacterRangeLeftAction_1_0 ( ) , current ) ; } otherlv_2 = ( Token ) match ( input , 49 , FollowSets000 . FOLLOW_11 ) ; newLeafNode ( otherlv_2 , grammarAccess . getCharacterRangeAccess ( ) . getFullStopFullStopKeyword_1_1 ( ) ) ; // InternalXtext . g : 3502:4 : ( ( lv _ right _ 3_0 = ruleKeyword ) ) // InternalXtext . g : 3503:5 : ( lv _ right _ 3_0 = ruleKeyword ) { // InternalXtext . g : 3503:5 : ( lv _ right _ 3_0 = ruleKeyword ) // InternalXtext . g : 3504:6 : lv _ right _ 3_0 = ruleKeyword { newCompositeNode ( grammarAccess . 
getCharacterRangeAccess ( ) . getRightKeywordParserRuleCall_1_2_0 ( ) ) ; pushFollow ( FollowSets000 . FOLLOW_2 ) ; lv_right_3_0 = ruleKeyword ( ) ; state . _fsp -- ; if ( current == null ) { current = createModelElementForParent ( grammarAccess . getCharacterRangeRule ( ) ) ; } set ( current , "right" , lv_right_3_0 , "org.eclipse.xtext.Xtext.Keyword" ) ; afterParserOrEnumRuleCall ( ) ; } } } break ; } } } leaveRule ( ) ; } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
public class DatabaseFullPrunedBlockStore { /** * Deletes the store by deleting the tables within the database . * @ throws BlockStoreException If tables couldn ' t be deleted . */ public void deleteStore ( ) throws BlockStoreException { } }
maybeConnect ( ) ; try { Statement s = conn . get ( ) . createStatement ( ) ; for ( String sql : getDropTablesSQL ( ) ) { s . execute ( sql ) ; } s . close ( ) ; } catch ( SQLException ex ) { throw new RuntimeException ( ex ) ; }
public class ArrayHashCode { /** * Given an { @ link ExpressionTree } that represents an argument of array type , rewrites it to wrap * it in a call to either { @ link java . util . Arrays # hashCode } if it is single dimensional , or { @ link * java . util . Arrays # deepHashCode } if it is multidimensional . */ private static String rewriteArrayArgument ( ExpressionTree arg , VisitorState state ) { } }
Types types = state . getTypes ( ) ; Type argType = ASTHelpers . getType ( arg ) ; Preconditions . checkState ( types . isArray ( argType ) , "arg must be of array type" ) ; if ( types . isArray ( types . elemtype ( argType ) ) ) { return "Arrays.deepHashCode(" + state . getSourceForNode ( arg ) + ")" ; } else { return "Arrays.hashCode(" + state . getSourceForNode ( arg ) + ")" ; }
public class Logging { /** * Kills zombie targets ( i . e . appenders ) */ public void kill_zombie_appenders ( ) { } }
// - Get all devices Vector dl = Util . instance ( ) . get_device_list ( "*" ) ; // - Check appenders validity then kill them if needed for ( Object aDl : dl ) { // - Get device reference DeviceImpl dev = ( DeviceImpl ) aDl ; // - Get device logger Logger logger = dev . get_logger ( ) ; if ( logger != null ) { Enumeration all_appenders = logger . getAllAppenders ( ) ; while ( all_appenders . hasMoreElements ( ) ) { Appender appender = ( Appender ) all_appenders . nextElement ( ) ; if ( ( ( TangoAppender ) appender ) . isValid ( ) == false ) { Util . out4 . println ( "Removing zombie appender " + dev . get_name ( ) + LOGGING_SEPARATOR + appender . getName ( ) ) ; logger . removeAppender ( appender ) ; } } } }
public class ProxySpinnerAdapter { /** * Inflates and returns the view , which is used to display the hint . * @ param parent * The parent view of the view , which should be inflated , as an instance of the class * { @ link ViewGroup } or null , if no parent view is available * @ return The view , which has been inflated , as an instance of the class { @ link View } */ private View inflateHintView ( @ Nullable final ViewGroup parent ) { } }
TextView view = ( TextView ) LayoutInflater . from ( context ) . inflate ( hintViewId , parent , false ) ; view . setText ( hint ) ; if ( hintColor != null ) { view . setTextColor ( hintColor ) ; } return view ;
public class StorableGenerator { /** * Generates a copy properties method with several options to control its * behavior . Although eight combinations can be defined , only four are * required by Storable interface . Uninitialized properties are never * copied . * @ param pkProperties when true , copy primary key properties * @ param dataProperties when true , copy data properties * @ param unequalOnly when true , only copy unequal properties * @ param dirtyOnly when true , only copy dirty properties */ private void addCopyPropertiesMethod ( String methodName , boolean pkProperties , boolean versionProperty , boolean dataProperties , boolean unequalOnly , boolean dirtyOnly ) { } }
TypeDesc [ ] param = { TypeDesc . forClass ( Storable . class ) } ; TypeDesc storableTypeDesc = TypeDesc . forClass ( mStorableType ) ; MethodInfo mi = addMethodIfNotFinal ( Modifiers . PUBLIC . toSynchronized ( true ) , methodName , null , param ) ; if ( mi == null ) { return ; } CodeBuilder b = new CodeBuilder ( mi ) ; LocalVariable target = CodeBuilderUtil . uneraseGenericParameter ( b , storableTypeDesc , 0 ) ; LocalVariable stateBits = null ; int mask = PROPERTY_STATE_MASK ; for ( StorableProperty property : mAllProperties . values ( ) ) { // Decide if property should be part of the copy . boolean shouldCopy = ( ! property . isDerived ( ) || property . shouldCopyDerived ( ) ) && ! property . isJoin ( ) && ( property . isPrimaryKeyMember ( ) && pkProperties || property . isVersion ( ) && versionProperty || ! property . isPrimaryKeyMember ( ) && dataProperties ) ; if ( shouldCopy ) { int ordinal = property . getNumber ( ) ; if ( stateBits == null && ! property . isDerived ( ) ) { // Load state bits into local for quick retrieval . stateBits = b . createLocalVariable ( null , TypeDesc . INT ) ; String stateFieldName = StorableGenerator . PROPERTY_STATE_FIELD_NAME + ( ordinal >> 4 ) ; b . loadThis ( ) ; b . loadField ( stateFieldName , TypeDesc . INT ) ; b . storeLocal ( stateBits ) ; } Label skipCopy = b . createLabel ( ) ; // Check if independent property is supported , and skip if not . if ( property . isIndependent ( ) ) { addSkipIndependent ( b , target , property , skipCopy ) ; } if ( stateBits != null && ! property . isDerived ( ) ) { // Skip property if uninitialized . b . loadLocal ( stateBits ) ; b . loadConstant ( mask ) ; b . math ( Opcode . IAND ) ; b . ifZeroComparisonBranch ( skipCopy , "==" ) ; if ( dirtyOnly ) { // Add code to find out if property has been dirty . b . loadLocal ( stateBits ) ; b . loadConstant ( mask ) ; b . math ( Opcode . IAND ) ; b . loadConstant ( PROPERTY_STATE_DIRTY << ( ( ordinal & 0xf ) * 2 ) ) ; b . 
ifComparisonBranch ( skipCopy , "!=" ) ; } } TypeDesc type = TypeDesc . forClass ( property . getType ( ) ) ; if ( unequalOnly ) { // Add code to find out if they ' re equal . loadThisProperty ( b , property , type ) ; // [ this . propValue b . loadLocal ( target ) ; // [ this . propValue , target b . invoke ( property . getReadMethod ( ) ) ; // [ this . propValue , target . propValue CodeBuilderUtil . addValuesEqualCall ( b , TypeDesc . forClass ( property . getType ( ) ) , true , skipCopy , true ) ; } b . loadLocal ( target ) ; // [ target loadThisProperty ( b , property , type ) ; // [ target , this . propValue if ( property . getWriteMethod ( ) != null ) { // Favor the write method , if it exists . b . invoke ( property . getWriteMethod ( ) ) ; } else { b . storeField ( property . getName ( ) , type ) ; } skipCopy . setLocation ( ) ; } if ( ( mask <<= 2 ) == 0 ) { mask = PROPERTY_STATE_MASK ; stateBits = null ; } } b . returnVoid ( ) ;
public class OffsetRange { /** * Provides an { @ link Ordering } of { @ link OffsetRange } s by their start position . Note that this is * not a total ordering because { @ link OffsetRange } s with the same start position but different * end positions will compare as equal . * Consider producing a compound ordering with { @ link # byEndOrdering ( ) } using { @ link * Ordering # compound ( Comparator ) } or using one of the predefined total orderings . */ public static < T extends Offset < T > > Ordering < OffsetRange < T > > byStartOrdering ( ) { } }
return Ordering . < T > natural ( ) . onResultOf ( OffsetRange . < T > toStartInclusiveFunction ( ) ) ;
public class VersionsImpl { /** * Gets the application versions info . * @ param appId The application ID . * @ param listOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the List & lt ; VersionInfo & gt ; object */ public Observable < ServiceResponse < List < VersionInfo > > > listWithServiceResponseAsync ( UUID appId , ListVersionsOptionalParameter listOptionalParameter ) { } }
if ( this . client . endpoint ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.endpoint() is required and cannot be null." ) ; } if ( appId == null ) { throw new IllegalArgumentException ( "Parameter appId is required and cannot be null." ) ; } final Integer skip = listOptionalParameter != null ? listOptionalParameter . skip ( ) : null ; final Integer take = listOptionalParameter != null ? listOptionalParameter . take ( ) : null ; return listWithServiceResponseAsync ( appId , skip , take ) ;
public class AuthorizationExceptionDto { /** * transformer / / / / / */ public static AuthorizationExceptionDto fromException ( AuthorizationException e ) { } }
AuthorizationExceptionDto dto = new AuthorizationExceptionDto ( ) ; dto . setMessage ( e . getMessage ( ) ) ; dto . setType ( AuthorizationException . class . getSimpleName ( ) ) ; dto . setUserId ( e . getUserId ( ) ) ; dto . setMissingAuthorizations ( MissingAuthorizationDto . fromInfo ( e . getMissingAuthorizations ( ) ) ) ; dto . setPermissionName ( e . getViolatedPermissionName ( ) ) ; dto . setResourceId ( e . getResourceId ( ) ) ; dto . setResourceName ( e . getResourceType ( ) ) ; return dto ;
public class MediaWikiBot { /** * TODO ' data ' is not very descriptive */ public ImmutableList < Optional < SimpleArticle > > readDataOpt ( String ... names ) { } }
return readDataOpt ( ImmutableList . copyOf ( names ) ) ;
public class ProducerSequenceFactory { /** * Bitmap cache get - > thread hand off - > multiplex - > bitmap cache * @ param inputProducer producer providing the input to the bitmap cache * @ return bitmap cache get to bitmap cache sequence */ private Producer < CloseableReference < CloseableImage > > newBitmapCacheGetToBitmapCacheSequence ( Producer < CloseableReference < CloseableImage > > inputProducer ) { } }
BitmapMemoryCacheProducer bitmapMemoryCacheProducer = mProducerFactory . newBitmapMemoryCacheProducer ( inputProducer ) ; BitmapMemoryCacheKeyMultiplexProducer bitmapKeyMultiplexProducer = mProducerFactory . newBitmapMemoryCacheKeyMultiplexProducer ( bitmapMemoryCacheProducer ) ; ThreadHandoffProducer < CloseableReference < CloseableImage > > threadHandoffProducer = mProducerFactory . newBackgroundThreadHandoffProducer ( bitmapKeyMultiplexProducer , mThreadHandoffProducerQueue ) ; return mProducerFactory . newBitmapMemoryCacheGetProducer ( threadHandoffProducer ) ;
public class ChaincodeCollectionConfiguration { /** * Creates a new ChaincodeCollectionConfiguration instance configured with details supplied in YAML format * @ param configStream A stream opened on a YAML document containing network configuration details * @ return A new ChaincodeCollectionConfiguration instance * @ throws InvalidArgumentException */ public static ChaincodeCollectionConfiguration fromYamlStream ( InputStream configStream ) throws InvalidArgumentException , ChaincodeCollectionConfigurationException { } }
logger . trace ( "ChaincodeCollectionConfiguration.fromYamlStream..." ) ; // Sanity check if ( configStream == null ) { throw new InvalidArgumentException ( "ConfigStream must be specified" ) ; } Yaml yaml = new Yaml ( ) ; @ SuppressWarnings ( "unchecked" ) List < Object > map = yaml . load ( configStream ) ; JsonArrayBuilder builder = Json . createArrayBuilder ( map ) ; JsonArray jsonConfig = builder . build ( ) ; return fromJsonObject ( jsonConfig ) ;
public class SCoveragePostCompileMojo { /** * Restores project original configuration after compilation with SCoverage instrumentation . */ @ Override public void execute ( ) { } }
if ( "pom" . equals ( project . getPackaging ( ) ) ) { return ; } if ( skip ) { return ; } long ts = System . currentTimeMillis ( ) ; Properties projectProperties = project . getProperties ( ) ; restoreProperty ( projectProperties , "sbt._scalacOptions" ) ; restoreProperty ( projectProperties , "sbt._scalacPlugins" ) ; restoreProperty ( projectProperties , "addScalacArgs" ) ; restoreProperty ( projectProperties , "analysisCacheFile" ) ; restoreProperty ( projectProperties , "maven.test.failure.ignore" ) ; long te = System . currentTimeMillis ( ) ; getLog ( ) . debug ( String . format ( "Mojo execution time: %d ms" , te - ts ) ) ;
public class SqlQueryStatement { /** * build the Join - Information for name * functions and the last segment are removed * ie : avg ( accounts . amount ) - > accounts */ private void buildJoinTreeForColumn ( String aColName , boolean useOuterJoin , UserAlias aUserAlias , Map pathClasses ) { } }
String pathName = SqlHelper . cleanPath ( aColName ) ; int sepPos = pathName . lastIndexOf ( "." ) ; if ( sepPos >= 0 ) { getTableAlias ( pathName . substring ( 0 , sepPos ) , useOuterJoin , aUserAlias , new String [ ] { pathName . substring ( sepPos + 1 ) } , pathClasses ) ; }
public class GenericServlet { /** * Writes an explanatory message and a stack trace * for a given < code > Throwable < / code > exception * to the servlet log file , prepended by the servlet ' s name . * See { @ link ServletContext # log ( String , Throwable ) } . * @ param message a < code > String < / code > that describes * the error or exception * @ param tthe < code > java . lang . Throwable < / code > error * or exception */ public void log ( String message , Throwable t ) { } }
getServletContext ( ) . log ( getServletName ( ) + ": " + message , t ) ;
public class SearchIterator { /** * Sets the position in the target text at which the next search will start . * This method clears any previous match . * @ param position position from which to start the next search * @ exception IndexOutOfBoundsException thrown if argument position is out * of the target text range . * @ see # getIndex */ public void setIndex ( int position ) { } }
if ( position < search_ . beginIndex ( ) || position > search_ . endIndex ( ) ) { throw new IndexOutOfBoundsException ( "setIndex(int) expected position to be between " + search_ . beginIndex ( ) + " and " + search_ . endIndex ( ) ) ; } search_ . reset_ = false ; search_ . setMatchedLength ( 0 ) ; search_ . matchedIndex_ = DONE ;
public class IsoInterval { /** * < p > Liefert die Rechenbasis zur Ermittlung einer Dauer . < / p > * @ return & auml ; quivalenter Zeitpunkt bei geschlossener unterer Grenze * @ throws UnsupportedOperationException wenn unendlich */ T getTemporalOfClosedStart ( ) { } }
T temporal = this . start . getTemporal ( ) ; if ( temporal == null ) { throw new UnsupportedOperationException ( "An infinite interval has no finite duration." ) ; } else if ( this . start . isOpen ( ) ) { return this . getTimeLine ( ) . stepForward ( temporal ) ; } else { return temporal ; }
public class ScroogeReadSupport { /** * Updated method from ReadSupport which checks if the projection ' s compatible instead of a * stricter check to see if the file ' s schema contains the projection * @ param fileMessageType * @ param projectedMessageType * @ return */ public static MessageType getSchemaForRead ( MessageType fileMessageType , MessageType projectedMessageType ) { } }
assertGroupsAreCompatible ( fileMessageType , projectedMessageType ) ; return projectedMessageType ;
public class CommerceNotificationAttachmentLocalServiceBaseImpl { /** * Updates the commerce notification attachment in the database or adds it if it does not yet exist . Also notifies the appropriate model listeners . * @ param commerceNotificationAttachment the commerce notification attachment * @ return the commerce notification attachment that was updated */ @ Indexable ( type = IndexableType . REINDEX ) @ Override public CommerceNotificationAttachment updateCommerceNotificationAttachment ( CommerceNotificationAttachment commerceNotificationAttachment ) { } }
return commerceNotificationAttachmentPersistence . update ( commerceNotificationAttachment ) ;
public class LevenshteinDistance { /** * Get the distance of the 2 strings , using the costs 1 for insertion , * deletion and substitution . * @ param aStr1 * First string . * @ param aStr2 * Second string . * @ return The Levenshtein distance . */ public static int getDistance ( @ Nullable final char [ ] aStr1 , @ Nullable final char [ ] aStr2 ) { } }
final int nLen1 = aStr1 == null ? 0 : aStr1 . length ; final int nLen2 = aStr2 == null ? 0 : aStr2 . length ; if ( nLen1 == 0 ) return nLen2 ; if ( nLen2 == 0 ) return nLen1 ; return _getDistance111 ( aStr1 , nLen1 , aStr2 , nLen2 ) ;
public class RefundService { /** * Returns and refresh detailed informations of a specific { @ link Refund } . * @ param refund * A { @ link Refund } with Id . * @ return Refreshed instance of the given { @ link Refund } . */ public Refund get ( Refund refund ) { } }
return RestfulUtils . show ( RefundService . PATH , refund , Refund . class , super . httpClient ) ;
public class JdbcUtil { /** * Imports the data from < code > DataSet < / code > to database . * @ param dataset * @ param offset * @ param count * @ param conn * @ param insertSQL the column order in the sql must be consistent with the column order in the DataSet . Here is sample about how to create the sql : * < pre > < code > * List < String > columnNameList = new ArrayList < > ( dataset . columnNameList ( ) ) ; * columnNameList . retainAll ( yourSelectColumnNames ) ; * String sql = RE . insert ( columnNameList ) . into ( tableName ) . sql ( ) ; * < / code > < / pre > * @ param batchSize * @ param batchInterval * @ param columnTypeMap * @ return * @ throws UncheckedSQLException */ @ SuppressWarnings ( "rawtypes" ) public static int importData ( final DataSet dataset , final int offset , final int count , final Connection conn , final String insertSQL , final int batchSize , final int batchInterval , final Map < String , ? extends Type > columnTypeMap ) throws UncheckedSQLException { } }
return importData ( dataset , offset , count , Fn . alwaysTrue ( ) , conn , insertSQL , batchSize , batchInterval , columnTypeMap ) ;
public class CassandraCqlMapState { @ SuppressWarnings ( "rawtypes" ) public static StateFactory opaque ( CqlRowMapper mapper ) { } }
Options < OpaqueValue > options = new Options < OpaqueValue > ( ) ; return opaque ( mapper , options ) ;
public class CSVSummariser { /** * Writes summary values and a stub mapping file based on the given * { @ link JDefaultDict } s . * @ param maxSampleCount * The maximum number of samples to write out * @ param emptyCounts * A { @ link JDefaultDict } containing the empty counts for each field * @ param nonEmptyCounts * A { @ link JDefaultDict } containing the non - empty counts for each * field * @ param possibleIntegerFields * A { @ link JDefaultDict } containing true if the field is possibly * integer and false otherwise * @ param possibleDoubleFields * A { @ link JDefaultDict } containing true if the field is possibly * double and false otherwise * @ param valueCounts * A { @ link JDefaultDict } containing values for each field and * attached counts * @ param headers * The headers that were either given or substituted * @ param rowCount * The total row count from the input file , used to determine if the * number of unique values matches the total number of rows * @ param showSampleCounts * True to attach sample counts to the sample output , and false to * omit it * @ param output * The { @ link Writer } to contain the summarised statistics * @ param mappingOutput * The { @ link Writer } to contain the stub mapping file * @ throws IOException * If there is an error writing */ static void writeForSummarise ( final int maxSampleCount , final JDefaultDict < String , AtomicInteger > emptyCounts , final JDefaultDict < String , AtomicInteger > nonEmptyCounts , final JDefaultDict < String , AtomicBoolean > possibleIntegerFields , final JDefaultDict < String , AtomicBoolean > possibleDoubleFields , final JDefaultDict < String , JDefaultDict < String , AtomicInteger > > valueCounts , final List < String > headers , final AtomicInteger rowCount , final boolean showSampleCounts , final Writer output , final Writer mappingOutput ) throws IOException { } }
// This schema defines the fields and order for the columns in the // summary CSV file final CsvSchema summarySchema = getSummaryCsvSchema ( ) ; final CsvSchema mappingSchema = getMappingCsvSchema ( ) ; // Shared StringBuilder across fields for efficiency // After each field the StringBuilder is truncated final StringBuilder sharedSampleValueBuilder = new StringBuilder ( ) ; final BiConsumer < ? super String , ? super String > sampleHandler = ( nextSample , nextCount ) -> { if ( sharedSampleValueBuilder . length ( ) > 0 ) { sharedSampleValueBuilder . append ( ", " ) ; } if ( nextSample . length ( ) > 200 ) { sharedSampleValueBuilder . append ( nextSample . substring ( 0 , 200 ) ) ; sharedSampleValueBuilder . append ( "..." ) ; } else { sharedSampleValueBuilder . append ( nextSample ) ; } if ( showSampleCounts ) { sharedSampleValueBuilder . append ( "(*" + nextCount + ")" ) ; } } ; try ( final SequenceWriter csvWriter = CSVStream . newCSVWriter ( output , summarySchema ) ; final SequenceWriter mappingWriter = CSVStream . newCSVWriter ( mappingOutput , mappingSchema ) ; ) { // Need to do this to get the header line written out in this case if ( rowCount . get ( ) == 0 ) { csvWriter . write ( Arrays . asList ( ) ) ; mappingWriter . write ( Arrays . asList ( ) ) ; } headers . forEach ( nextHeader -> { try { final int emptyCount = emptyCounts . get ( nextHeader ) . get ( ) ; final int nonEmptyCount = nonEmptyCounts . get ( nextHeader ) . get ( ) ; JDefaultDict < String , AtomicInteger > nextValueCount = valueCounts . get ( nextHeader ) ; final int valueCount = nextValueCount . keySet ( ) . size ( ) ; final boolean possiblePrimaryKey = valueCount == nonEmptyCount && valueCount == rowCount . 
get ( ) ; boolean possiblyInteger = false ; boolean possiblyDouble = false ; // Only expose our numeric type guess if non - empty values // found // This is important , as it should default to true unless // evidence to the contrary is found , with the total number of observations , // when equal to 0 , being used to identify the false positive cases if ( nonEmptyCount > 0 ) { possiblyInteger = possibleIntegerFields . get ( nextHeader ) . get ( ) ; possiblyDouble = possibleDoubleFields . get ( nextHeader ) . get ( ) ; } final Stream < String > stream = nextValueCount . keySet ( ) . stream ( ) ; if ( maxSampleCount > 0 ) { stream . limit ( maxSampleCount ) . sorted ( ) . forEach ( s -> sampleHandler . accept ( s , nextValueCount . get ( s ) . toString ( ) ) ) ; if ( valueCount > maxSampleCount ) { sharedSampleValueBuilder . append ( ", ..." ) ; } } else if ( maxSampleCount < 0 ) { stream . sorted ( ) . forEach ( s -> sampleHandler . accept ( s , nextValueCount . get ( s ) . toString ( ) ) ) ; } csvWriter . write ( Arrays . asList ( nextHeader , emptyCount , nonEmptyCount , valueCount , possiblePrimaryKey , possiblyInteger , possiblyDouble , sharedSampleValueBuilder ) ) ; final String mappingFieldType = possiblyInteger ? "INTEGER" : possiblyDouble ? "DECIMAL" : "TEXT" ; mappingWriter . write ( Arrays . asList ( nextHeader , nextHeader , "" , ValueMapping . ValueMappingLanguage . DBSCHEMA . name ( ) , mappingFieldType ) ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } finally { // Very important to reset this shared StringBuilder after // each row is written sharedSampleValueBuilder . setLength ( 0 ) ; } } ) ; }
public class AbstractView {

    /**
     * Adds an event handler on the given node according to an OnXxxxx annotation.
     *
     * @param target the graphical node, must be not null (is a subtype of EventTarget)
     * @param annotation the OnXxxx annotation
     * @throws CoreException if an error occurred while linking the event handler
     */
    private void addHandler (final EventTarget target, final Annotation annotation) throws CoreException {
        // Build the auto event handler for this annotation.
        final AnnotationEventHandler<Event> handler =
                new AnnotationEventHandler<> (this.callbackObject, annotation);
        final EnumEventType[] eventTypes =
                (EnumEventType[]) ClassUtility.getAnnotationAttribute (annotation, "value");
        for (final EnumEventType eet : eventTypes) {
            if (target instanceof Node) {
                ((Node) target).addEventHandler (eet.eventType (), handler);
            } else if (target instanceof MenuItem) {
                if (eet.eventType () == ActionEvent.ACTION) {
                    // MenuItem ACTION events get a dedicated handler instance.
                    ((MenuItem) target).addEventHandler (ActionEvent.ACTION,
                            new AnnotationEventHandler<> (this.callbackObject, annotation));
                } else {
                    // Any non-ACTION event on a MenuItem maps to menu validation.
                    ((MenuItem) target).setOnMenuValidation (handler);
                }
            } else if (target instanceof Window) {
                ((Window) target).addEventHandler (eet.eventType (), handler);
            }
        }
    }
}
public class AutoModify { /** * Process the modify directives . * @ param APIM - * An instance of FedoraAPIM . * @ param UPLOADER - * An instance of the Uploader . * @ param directivesFilePath - * The absolute file path of the file containing the modify * directives . * @ param APIA - * An instance of FedoraAPIA . * @ param logFilePath - * The absolute file path of the log file . * @ param isValidateOnly - * Boolean flag ; true indicates validate only ; false indicates * process the directives file . */ public static void modify ( FedoraAPIMMTOM APIM , Uploader UPLOADER , FedoraAPIAMTOM APIA , String directivesFilePath , String logFilePath , boolean isValidateOnly ) { } }
// Opens the directives file and either validates it (BatchModifyValidator) or processes it
// (BatchModifyParser), logging to s_log. The finally block closes the stream and then writes a
// <summary> section covering three outcomes: parser aborted mid-run (getFailedCount() == -1,
// failure count unknown), parser completed (success + failure counts), or validation-only
// (valid / error count). All exceptions are reported to stdout rather than rethrown.
// NOTE(review): exact interior whitespace of the summary/log strings cannot be confirmed from
// this extraction — verify against the original source before reformatting.
InputStream in = null ; BatchModifyParser bmp = null ; BatchModifyValidator bmv = null ; long st = System . currentTimeMillis ( ) ; long et = 0 ; try { in = new FileInputStream ( directivesFilePath ) ; if ( isValidateOnly ) { openLog ( logFilePath , "validate-modify-directives" ) ; bmv = new BatchModifyValidator ( in , s_log ) ; } else { openLog ( logFilePath , "modify-batch" ) ; bmp = new BatchModifyParser ( UPLOADER , APIM , APIA , in , s_log ) ; } } catch ( Exception e ) { System . out . println ( e . getClass ( ) . getName ( ) + " - " + ( e . getMessage ( ) == null ? "(no detail provided)" : e . getMessage ( ) ) ) ; } finally { try { if ( in != null ) { in . close ( ) ; } if ( s_log != null ) { et = System . currentTimeMillis ( ) ; if ( bmp != null ) { if ( bmp . getFailedCount ( ) == - 1 ) { System . out . println ( "\n\n" + bmp . getSucceededCount ( ) + " modify directives successfully processed.\n" + "Parser error encountered.\n" + "An unknown number of modify directives were not processed.\n" + "See log file for details of those directives processed before the error.\n" + "Time elapsed: " + getDuration ( et - st ) ) ; s_log . println ( " <summary>" ) ; s_log . println ( " " + StreamUtility . enc ( bmp . getSucceededCount ( ) + " modify directives successfully processed.\n" + " Parser error encountered.\n" + " An unknown number of modify directives were not processed.\n" + " Time elapsed: " + getDuration ( et - st ) ) ) ; s_log . println ( " </summary>" ) ; } else { System . out . println ( "\n\n" + bmp . getSucceededCount ( ) + " modify directives successfully processed.\n" + bmp . getFailedCount ( ) + " modify directives failed.\n" + "See log file for details.\n" + "Time elapsed: " + getDuration ( et - st ) ) ; s_log . println ( " <summary>" ) ; s_log . println ( " " + StreamUtility . enc ( bmp . getSucceededCount ( ) + " modify directives successfully processed.\n " + bmp . 
getFailedCount ( ) + " modify directives failed.\n" + " Time elapsed: " + getDuration ( et - st ) ) ) ; s_log . println ( " </summary>" ) ; } } else if ( bmv != null ) { et = System . currentTimeMillis ( ) ; if ( bmv . isValid ( ) ) { System . out . println ( "Modify Directives File in \n" + directivesFilePath + "\n is Valid !" + "\nTime elapsed: " + getDuration ( et - st ) ) ; s_log . println ( " <summary>" ) ; s_log . println ( " Modify Directives File: \n " + directivesFilePath + "\n is Valid !" + "\n Time elapsed: " + getDuration ( et - st ) ) ; s_log . println ( " </summary>" ) ; } else { System . out . println ( bmv . getErrorCount ( ) + " XML validation Errors found in Modify Directives file.\n" + "See log file for details.\n" + "Time elapsed: " + getDuration ( et - st ) ) ; s_log . println ( " <summary>" ) ; s_log . println ( " " + StreamUtility . enc ( bmv . getErrorCount ( ) + " XML validation Errors found in Modify Directives file.\n" + " See log file for details.\n" + " Time elapsed: " + getDuration ( et - st ) ) ) ; s_log . println ( " </summary>" ) ; } } closeLog ( ) ; System . out . println ( "A detailed log file was created at\n" + logFilePath + "\n\n" ) ; } } catch ( Exception e ) { System . out . println ( e . getClass ( ) . getName ( ) + " - " + ( e . getMessage ( ) == null ? "(no detail provided)" : e . getMessage ( ) ) ) ; } }
public class TreeContent { /** * Add this node to the parent . * @ throws JspException if a JSP exception has occurred */ public void doTag ( ) throws JspException , IOException { } }
String value = getBufferBody ( true ) ; if ( value != null ) _text = value ; Object o = getParent ( ) ; assert ( o != null ) ; if ( ! ( o instanceof TreeItem ) ) { logger . error ( "Invalid Parent (expected a TreeItem):" + o . getClass ( ) . getName ( ) ) ; return ; } // assign the value to the parent ' s label value TreeItem ti = ( TreeItem ) o ; ti . setItemContent ( _text ) ;
public class NameSpace { /** * Clear all variables , methods , and imports from this namespace . If this * namespace is the root , it will be reset to the default imports . * @ see # loadDefaultImports ( ) */ public void clear ( ) { } }
this . variables . clear ( ) ; this . methods . clear ( ) ; this . importedClasses . clear ( ) ; this . importedPackages . clear ( ) ; this . importedCommands . clear ( ) ; this . importedObjects . clear ( ) ; if ( this . parent == null ) this . loadDefaultImports ( ) ; this . classCache . clear ( ) ; this . names . clear ( ) ;
public class ExplodedImporterImpl { /** * Calculate the relative child path . * @ param root * The Archive root folder * @ param child * The Child file * @ return a Path fort he child relative to root */ private ArchivePath calculatePath ( File root , File child ) { } }
String rootPath = unifyPath ( root . getPath ( ) ) ; String childPath = unifyPath ( child . getPath ( ) ) ; String archiveChildPath = childPath . replaceFirst ( Pattern . quote ( rootPath ) , "" ) ; return new BasicPath ( archiveChildPath ) ;
public class DataSourceTask {
    /**
     * Creates a writer for each output. Creates an OutputCollector which
     * forwards its input to all writers. The output collector applies the
     * configured shipping strategy.
     *
     * @param cl the class loader used to instantiate user code for the outputs
     * @throws Exception if setting up the outputs fails
     */
    private void initOutputs(ClassLoader cl) throws Exception {
        // Fresh containers; both are populated by the shared helper below.
        this.chainedTasks = new ArrayList<ChainedDriver<?, ?>>();
        this.eventualOutputs = new ArrayList<BufferWriter>();
        // Delegate the actual wiring of writers and collector to RegularPactTask.
        this.output = RegularPactTask.initOutputs(this, cl, this.config, this.chainedTasks, this.eventualOutputs);
    }
}
public class SharedValue { /** * Change the shared value value irrespective of its previous state * @ param newValue new value * @ throws Exception ZK errors , interruptions , etc . */ public void setValue ( byte [ ] newValue ) throws Exception { } }
Preconditions . checkState ( state . get ( ) == State . STARTED , "not started" ) ; Stat result = client . setData ( ) . forPath ( path , newValue ) ; updateValue ( result . getVersion ( ) , Arrays . copyOf ( newValue , newValue . length ) ) ;
public class GeopositionComparator { /** * Returns the distance between the two points in meters . */ public static double distance ( double lat1 , double lon1 , double lat2 , double lon2 ) { } }
double dLat = Math . toRadians ( lat2 - lat1 ) ; double dLon = Math . toRadians ( lon2 - lon1 ) ; lat1 = Math . toRadians ( lat1 ) ; lat2 = Math . toRadians ( lat2 ) ; double a = Math . sin ( dLat / 2 ) * Math . sin ( dLat / 2 ) + Math . sin ( dLon / 2 ) * Math . sin ( dLon / 2 ) * Math . cos ( lat1 ) * Math . cos ( lat2 ) ; double c = 2 * Math . atan2 ( Math . sqrt ( a ) , Math . sqrt ( 1 - a ) ) ; return R * c ;
public class GeneratedDFactoryDaoImpl { /** * query - by method for field baseUrl * @ param baseUrl the specified attribute * @ return an Iterable of DFactorys for the specified baseUrl */ public Iterable < DFactory > queryByBaseUrl ( java . lang . String baseUrl ) { } }
return queryByField ( null , DFactoryMapper . Field . BASEURL . getFieldName ( ) , baseUrl ) ;
public class PolicyAssignmentsInner { /** * Deletes a policy assignment . * @ param scope The scope of the policy assignment . * @ param policyAssignmentName The name of the policy assignment to delete . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PolicyAssignmentInner object */ public Observable < PolicyAssignmentInner > deleteAsync ( String scope , String policyAssignmentName ) { } }
return deleteWithServiceResponseAsync ( scope , policyAssignmentName ) . map ( new Func1 < ServiceResponse < PolicyAssignmentInner > , PolicyAssignmentInner > ( ) { @ Override public PolicyAssignmentInner call ( ServiceResponse < PolicyAssignmentInner > response ) { return response . body ( ) ; } } ) ;
public class server { /** * Use this API to update server . */ public static base_response update ( nitro_service client , server resource ) throws Exception { } }
server updateresource = new server ( ) ; updateresource . name = resource . name ; updateresource . ipaddress = resource . ipaddress ; updateresource . domainresolveretry = resource . domainresolveretry ; updateresource . translationip = resource . translationip ; updateresource . translationmask = resource . translationmask ; updateresource . domainresolvenow = resource . domainresolvenow ; updateresource . comment = resource . comment ; return updateresource . update_resource ( client ) ;
public class AxisSerializer { /** * Uses reflection to get an Axis Serializer . */ private < T extends Serializable > Serializer getSerializer ( QName xmlType , Class < T > clazz ) throws SecurityException , NoSuchMethodException , IllegalArgumentException , IllegalAccessException , InvocationTargetException { } }
Method getSerializer = clazz . getMethod ( "getSerializer" , String . class , Class . class , QName . class ) ; return ( Serializer ) getSerializer . invoke ( null , null , clazz , xmlType ) ;
public class SerializerIntrinsics { /** * Store st ( 0 ) as 16 bit or 32 bit Integer to @ a dst ( FPU ) . */ public final void fist ( Mem dst ) { } }
assert ( dst . size ( ) == 2 || dst . size ( ) == 4 ) ; emitX86 ( INST_FIST , dst ) ;
public class ClusterCacheStatus {
    /**
     * Recomputes the cache topology after a cluster membership change: prunes
     * invalid members from the current (and any pending) consistent hash,
     * installs the new topology, and broadcasts it to the cluster.
     *
     * TODO: newMembers isn't really used, pruneInvalidMembers uses expectedMembers
     */
    @Override
    public synchronized void updateCurrentTopology(List<Address> newMembers) {
        // The current topology might be null just after a joiner became the coordinator
        if (currentTopology == null) {
            createInitialCacheTopology();
        }
        ConsistentHashFactory<ConsistentHash> consistentHashFactory = getJoinInfo().getConsistentHashFactory();
        int topologyId = currentTopology.getTopologyId();
        int rebalanceId = currentTopology.getRebalanceId();
        ConsistentHash currentCH = currentTopology.getCurrentCH();
        ConsistentHash pendingCH = currentTopology.getPendingCH();
        if (!needConsistentHashUpdate()) {
            log.tracef("Cache %s members list was updated, but the cache topology doesn't need to change: %s", cacheName, currentTopology);
            return;
        }
        if (newMembers.isEmpty()) {
            // No members remain: tear down the topology and reset lifecycle state.
            log.tracef("Cache %s no longer has any members, removing topology", cacheName);
            setCurrentTopology(null);
            setStableTopology(null);
            rebalanceConfirmationCollector = null;
            status = ComponentStatus.INSTANTIATED;
            return;
        }
        List<Address> newCurrentMembers = pruneInvalidMembers(currentCH.getMembers());
        ConsistentHash newCurrentCH, newPendingCH = null;
        CacheTopology.Phase newPhase = CacheTopology.Phase.NO_REBALANCE;
        List<Address> actualMembers;
        if (newCurrentMembers.isEmpty()) {
            // All the current members left, try to replace them with the joiners
            log.tracef("All current members left, re-initializing status for cache %s", cacheName);
            rebalanceConfirmationCollector = null;
            newCurrentMembers = getExpectedMembers();
            actualMembers = newCurrentMembers;
            newCurrentCH = joinInfo.getConsistentHashFactory().create(joinInfo.getHashFunction(), joinInfo.getNumOwners(), joinInfo.getNumSegments(), newCurrentMembers, getCapacityFactors());
        } else {
            // ReplicatedConsistentHashFactory allocates segments to all its members, so we can't add any members here
            newCurrentCH = consistentHashFactory.updateMembers(currentCH, newCurrentMembers, getCapacityFactors());
            actualMembers = newCurrentMembers;
            if (pendingCH != null) {
                // A rebalance is in flight: prune the pending member set as well.
                newPhase = currentTopology.getPhase();
                List<Address> newPendingMembers = pruneInvalidMembers(pendingCH.getMembers());
                newPendingCH = consistentHashFactory.updateMembers(pendingCH, newPendingMembers, getCapacityFactors());
                actualMembers = pruneInvalidMembers(newPendingMembers);
            }
        }
        // Losing members during state transfer could lead to a state where we have more than two topologies
        // concurrently in the cluster. We need to make sure that all the topologies are compatible (properties set
        // in CacheTopology docs hold) - we just remove lost members.
        CacheTopology newTopology = new CacheTopology(topologyId + 1, rebalanceId, newCurrentCH, newPendingCH, newPhase, actualMembers, persistentUUIDManager.mapAddresses(actualMembers));
        setCurrentTopology(newTopology);
        if (rebalanceConfirmationCollector != null) {
            // The node that will cancel the state transfer because of another topology update won't send topology confirm
            log.debugf("Cancelling topology confirmation %s because of another topology update", rebalanceConfirmationCollector);
            rebalanceConfirmationCollector = null;
        }
        CLUSTER.updatingTopology(cacheName, newTopology, availabilityMode);
        eventLogger.info(EventLogCategory.CLUSTER, MESSAGES.cacheMembersUpdated(actualMembers, newTopology.getTopologyId()));
        clusterTopologyManager.broadcastTopologyUpdate(cacheName, newTopology, availabilityMode, isTotalOrder(), isDistributed());
    }
}
public class AfplibFactoryImpl {
    /**
     * Converts the given MDDXmBase instance value to its string form.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String convertMDDXmBaseToString(EDataType eDataType, Object instanceValue) {
        // Null-safe default conversion via toString().
        return instanceValue == null ? null : instanceValue.toString();
    }
}
public class ParameterGroup { /** * Gets an enumeration describing the available options . * @ return an enumeration of all the available options . */ public Enumeration listOptions ( ) { } }
Vector result ; Enumeration en ; result = new Vector ( ) ; result . addElement ( new Option ( "\tA parameter setup for generating the setups.\n" + "\tCan be supplied multiple times.\n" + "\t(default: " + AbstractParameter . class . getName ( ) + ")" , "search" , 1 , "-search <classname options>" ) ) ; result . addElement ( new Option ( "" , "" , 0 , "\nOptions specific to search parameter class '" + MathParameter . class . getName ( ) + "' ('-search'):" ) ) ; en = new MathParameter ( ) . listOptions ( ) ; while ( en . hasMoreElements ( ) ) result . addElement ( en . nextElement ( ) ) ; result . addElement ( new Option ( "" , "" , 0 , "\nOptions specific to search parameter class '" + ListParameter . class . getName ( ) + "' ('-search'):" ) ) ; en = new ListParameter ( ) . listOptions ( ) ; while ( en . hasMoreElements ( ) ) result . addElement ( en . nextElement ( ) ) ; return result . elements ( ) ;
public class CmsResourceTypeStatResult { /** * Gets the localized result message . < p > * @ return the result as string */ public String getResult ( ) { } }
String res ; CmsSite site = OpenCms . getSiteManager ( ) . getSiteForSiteRoot ( m_siteRoot ) ; if ( site == null ) { res = CmsVaadinUtils . getMessageText ( Messages . GUI_DATABASEAPP_STATS_RESULTS_ROOT_1 , new Integer ( m_count ) ) ; } else { res = CmsVaadinUtils . getMessageText ( Messages . GUI_DATABASEAPP_STATS_RESULTS_2 , site . getTitle ( ) , new Integer ( m_count ) ) ; } return res ;
public class JNRPEClient { /** * Inovoke a command installed in JNRPE . * @ param sCommandName * The name of the command to be invoked * @ param arguments * The arguments to pass to the command ( will substitute the * $ ARGSx $ parameters ) * @ return The value returned by the server * @ throws JNRPEClientException * Thrown on any communication error . */ public final ReturnValue sendCommand ( final String sCommandName , final String ... arguments ) throws JNRPEClientException { } }
return sendRequest ( new JNRPERequest ( sCommandName , arguments ) ) ;
public class Shape { /** * Get the size of the specified dimension . Equivalent to shape ( ) [ dimension ] * @ param buffer The buffer to get the * @ param dimension The dimension to get . * @ return The size of the specified dimension */ public static int size ( IntBuffer buffer , int dimension ) { } }
int rank = rank ( buffer ) ; if ( dimension >= rank ) throw new IllegalArgumentException ( "Invalid dimension " + dimension + " for rank " + rank + " array" ) ; return buffer . get ( 1 + dimension ) ;
public class Marker { /** * Defines the color that will be used to colorize the marker . * @ param COLOR */ public void setColor ( final Color COLOR ) { } }
if ( null == color ) { _color = COLOR ; } else { color . set ( COLOR ) ; } fireMarkerEvent ( COLOR_CHANGED_EVENT ) ;
public class ClassFinder { /** * Searches the class path for a class with the given name , * returning a ClassFile for it . Returns null if not found . * @ param className the class to search for * @ return a ClassFile instance , or null if not found */ public ClassFile find ( String className ) { } }
for ( PathEntry pe : list ) { ClassFile cf = pe . find ( className ) ; if ( cf != null ) { return cf ; } } return null ;
public class ShardingRule { /** * Judge contains table in sharding rule . * @ param logicTableName logic table name * @ return contains table in sharding rule or not */ public boolean contains ( final String logicTableName ) { } }
return findTableRule ( logicTableName ) . isPresent ( ) || findBindingTableRule ( logicTableName ) . isPresent ( ) || isBroadcastTable ( logicTableName ) ;
public class GanttBarStyleFactory14 {
    /**
     * {@inheritDoc}
     *
     * Decodes the exception-style records from the given property block. Each
     * record is a fixed 71-byte structure describing how one task's Gantt bar
     * deviates from the default styles.
     */
    @Override
    public GanttBarStyleException[] processExceptionStyles(Props props) {
        GanttBarStyleException[] barStyle = null;
        byte[] barData = props.getByteArray(EXCEPTION_PROPERTIES);
        if (barData != null) {
            // System.out.println(ByteArrayHelper.hexdump(barData, false, 71, " "));
            // One 71-byte record per exception entry.
            barStyle = new GanttBarStyleException[barData.length / 71];
            int offset = 0;
            for (int loop = 0; loop < barStyle.length; loop++) {
                GanttBarStyleException style = new GanttBarStyleException();
                barStyle[loop] = style;
                style.setTaskUniqueID(MPPUtility.getInt(barData, offset));
                // Stored style index is 1-based; convert to 0-based.
                style.setBarStyleIndex(MPPUtility.getShort(barData, offset + 4) - 1);
                // Byte at offset+20 packs the start cap: shape = value % 25, type = value / 25.
                style.setStartShape(GanttBarStartEndShape.getInstance(barData[offset + 20] % 25));
                style.setStartType(GanttBarStartEndType.getInstance(barData[offset + 20] / 25));
                style.setStartColor(MPPUtility.getColor(barData, offset + 21));
                style.setMiddleShape(GanttBarMiddleShape.getInstance(barData[offset + 6]));
                style.setMiddlePattern(ChartPattern.getInstance(barData[offset + 7]));
                style.setMiddleColor(MPPUtility.getColor(barData, offset + 8));
                // Byte at offset+33 packs the end cap the same way as the start cap.
                style.setEndShape(GanttBarStartEndShape.getInstance(barData[offset + 33] % 25));
                style.setEndType(GanttBarStartEndType.getInstance(barData[offset + 33] / 25));
                style.setEndColor(MPPUtility.getColor(barData, offset + 34));
                // Task fields displayed around and inside the bar.
                style.setLeftText(getTaskField(MPPUtility.getShort(barData, offset + 49)));
                style.setRightText(getTaskField(MPPUtility.getShort(barData, offset + 53)));
                style.setTopText(getTaskField(MPPUtility.getShort(barData, offset + 57)));
                style.setBottomText(getTaskField(MPPUtility.getShort(barData, offset + 61)));
                style.setInsideText(getTaskField(MPPUtility.getShort(barData, offset + 65)));
                // System.out.println(style);
                offset += 71;
            }
        }
        return barStyle;
    }
}
public class ExpressRouteCircuitsInner { /** * Gets the currently advertised ARP table associated with the express route circuit in a resource group . * @ param resourceGroupName The name of the resource group . * @ param circuitName The name of the express route circuit . * @ param peeringName The name of the peering . * @ param devicePath The path of the device . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < ExpressRouteCircuitsArpTableListResultInner > beginListArpTableAsync ( String resourceGroupName , String circuitName , String peeringName , String devicePath , final ServiceCallback < ExpressRouteCircuitsArpTableListResultInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( beginListArpTableWithServiceResponseAsync ( resourceGroupName , circuitName , peeringName , devicePath ) , serviceCallback ) ;
public class AfplibPackageImpl {
    /**
     * Returns the EEnum metadata object for ObjectClassificationObjClass,
     * lazily resolved from the registered AFP library package.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getObjectClassificationObjClass() {
        if (objectClassificationObjClassEEnum == null) {
            // Lazy lookup from the global package registry; index 100 is the
            // generated classifier position — do not change by hand.
            objectClassificationObjClassEEnum = (EEnum) EPackage.Registry.INSTANCE
                .getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(100);
        }
        return objectClassificationObjClassEEnum;
    }
}
public class BPFImpl {
    /**
     * Resets the given structural feature to its default value.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case AfplibPackage.BPF__PF_NAME:
                // Restore the generated default for the PF name attribute.
                setPFName(PF_NAME_EDEFAULT);
                return;
            case AfplibPackage.BPF__TRIPLETS:
                // Containment list: unsetting means emptying it.
                getTriplets().clear();
                return;
        }
        // Unknown feature: defer to the superclass implementation.
        super.eUnset(featureID);
    }
}
public class MeanShiftPeak {
    /**
     * Updates the location of the rectangular bounding box so it is centered
     * on the requested point, then clamps it to lie fully inside the image.
     *
     * @param cx image center x-axis
     * @param cy image center y-axis
     */
    protected void setRegion(float cx, float cy) {
        // Center the region on (cx, cy).
        x0 = cx - radius;
        y0 = cy - radius;
        // Clamp horizontally so the region stays inside the image.
        if (x0 < 0) {
            x0 = 0;
        } else if (x0 + width > image.width) {
            x0 = image.width - width;
        }
        // Clamp vertically. NOTE(review): `width` is used for the vertical
        // extent too — presumably the region is square (width == height
        // derived from radius); confirm if non-square regions are possible.
        if (y0 < 0) {
            y0 = 0;
        } else if (y0 + width > image.height) {
            y0 = image.height - width;
        }
    }
}
public class LineMap {
    /**
     * Parses a Java stack trace, converting files and line numbers when
     * possible: "at ..." frames that reference the generated destination file
     * are rewritten via convertError; all other lines pass through unchanged.
     *
     * @param array the raw stack-trace characters
     * @return a buffer containing the filtered trace
     */
    private CharBuffer filter(char[] array) {
        CharBuffer buf = new CharBuffer();
        CharBuffer fun = new CharBuffer();
        CharBuffer file = new CharBuffer();
        int i = 0;
        while (i < array.length) {
            fun.clear();
            file.clear();
            int start = i;
            int end;
            // Find the end of the current line.
            for (end = i; end < array.length && array[end] != '\n'; end++) {
            }
            // Collect leading whitespace followed by the first word of the line.
            for (; i < end && Character.isWhitespace(array[i]); i++) {
                fun.append(array[i]);
            }
            // skip 'at'
            for (; i < end && !Character.isWhitespace(array[i]); i++) {
                fun.append(array[i]);
            }
            if (!fun.endsWith("at")) {
                // Not a stack-frame line: copy it through verbatim.
                for (i = start; i < end; i++) {
                    buf.append(array[i]);
                }
                i = end + 1;
                buf.append('\n');
                continue;
            }
            // Skip whitespace after "at", then read the method reference up to '('.
            for (; i < end && Character.isWhitespace(array[i]); i++) {
            }
            fun.clear();
            for (; i < end && !Character.isWhitespace(array[i]) && array[i] != '('; i++) {
                fun.append(array[i]);
            }
            if (i < end && array[i] == '(')
                i++;
            // Read the file-name portion (up to ':' or ')').
            for (; i < end && !Character.isWhitespace(array[i]) && array[i] != ':' && array[i] != ')'; i++) {
                file.append(array[i]);
            }
            // Parse the optional ':<digits>' line-number suffix.
            int line = -1;
            if (i < end && array[i] == ':') {
                line = 0;
                for (i++; i < end && array[i] >= '0' && array[i] <= '9'; i++) {
                    line = 10 * line + array[i] - '0';
                }
            }
            // Consume any remaining file-name characters after the digits.
            for (; i < end && !Character.isWhitespace(array[i]) && array[i] != ':' && array[i] != ')'; i++) {
                file.append(array[i]);
            }
            // Re-emit the frame, remapping it when it refers to the generated file.
            buf.append("\tat ");
            buf.append(fun);
            buf.append("(");
            String dstFile = file.toString();
            if (dstFile.equals(_dstFilename)) {
                convertError(buf, line);
            } else {
                buf.append(file);
                if (line > 0) {
                    buf.append(":");
                    buf.append(line);
                }
            }
            buf.append(array, i, end - i);
            buf.append('\n');
            i = end + 1;
        }
        return buf;
    }
}
public class BuiltInErrorProducer { /** * testing * */ boolean wouldCatch ( Class c ) { } }
if ( defaultCatchers != null && defaultCatchers . size ( ) > 0 ) return true ; List < ErrorCatcher > errorCatcherList = catchers . get ( c . getName ( ) ) ; if ( errorCatcherList != null && errorCatcherList . size ( ) > 0 ) return true ; return false ;
public class AbstractPrintQuery { /** * Get the object returned by the given select statement . * @ param < T > class the return value will be casted to * @ param _ selectStmt select statement the object is wanted for * @ return object for the select statement * @ throws EFapsException on error */ @ SuppressWarnings ( "unchecked" ) public < T > T getSelect ( final String _selectStmt ) throws EFapsException { } }
final OneSelect oneselect = this . selectStmt2OneSelect . get ( _selectStmt ) ; return oneselect == null ? null : ( T ) oneselect . getObject ( ) ;
public class SequenceSampler { /** * Runs the Viterbi algorithm on the sequence model given by the TagScorer * in order to find the best sequence . * @ return an array containing the int tags of the best sequence */ public int [ ] bestSequence ( SequenceModel ts ) { } }
int [ ] sample = new int [ ts . length ( ) + ts . leftWindow ( ) ] ; for ( int pos = ts . leftWindow ( ) ; pos < sample . length ; pos ++ ) { double [ ] scores = ts . scoresOf ( sample , pos ) ; double total = 0.0 ; for ( int i = 0 ; i < scores . length ; i ++ ) { scores [ i ] = Math . exp ( scores [ i ] ) ; } ArrayMath . normalize ( scores ) ; int l = ArrayMath . sampleFromDistribution ( scores ) ; sample [ pos ] = ts . getPossibleValues ( pos ) [ l ] ; } return sample ;
public class SlotManager { /** * Finds a matching slot request for a given resource profile . If there is no such request , * the method returns null . * < p > Note : If you want to change the behaviour of the slot manager wrt slot allocation and * request fulfillment , then you should override this method . * @ param slotResourceProfile defining the resources of an available slot * @ return A matching slot request which can be deployed in a slot with the given resource * profile . Null if there is no such slot request pending . */ protected PendingSlotRequest findMatchingRequest ( ResourceProfile slotResourceProfile ) { } }
for ( PendingSlotRequest pendingSlotRequest : pendingSlotRequests . values ( ) ) { if ( ! pendingSlotRequest . isAssigned ( ) && slotResourceProfile . isMatching ( pendingSlotRequest . getResourceProfile ( ) ) ) { return pendingSlotRequest ; } } return null ;
public class InternalMailUtil {
    /**
     * Parses the first address from the given address string.
     *
     * @param address the address string
     * @param charset the charset used when decoding the addresses
     * @return the first parsed address
     */
    public static InternetAddress parseFirstAddress(String address, Charset charset) {
        final InternetAddress[] internetAddresses = parseAddress(address, charset);
        if (ArrayUtil.isEmpty(internetAddresses)) {
            // Fall back to treating the raw string as a single address.
            try {
                return new InternetAddress(address);
            } catch (AddressException e) {
                throw new MailException(e);
            }
        }
        return internetAddresses[0];
    }
}
public class ServiceRefObjectFactory {
    /**
     * This method will create an instance of either a javax.xml.ws.Service
     * subclass, or it will create an SEI type. This will be called by either
     * the resource injection engine or by the naming code when a JNDI lookup
     * is done.
     */
    @Override
    public Object getObjectInstance(Object obj, Name name, Context context, @Sensitive Hashtable<?, ?> environment) throws Exception {
        // Only handle Reference objects created by this factory.
        if (!(obj instanceof Reference)) {
            return null;
        }
        Reference ref = (Reference) obj;
        if (!ServiceRefObjectFactory.class.getName().equals(ref.getFactoryClassName())) {
            return null;
        }
        // Retrieve our service-ref metadata from the Reference object.
        WebServiceRefInfo wsrInfo = null;
        WebServiceRefInfoRefAddr wsrInfoRefAddr = (WebServiceRefInfoRefAddr) ref.get(WebServiceRefInfoRefAddr.ADDR_KEY);
        if (wsrInfoRefAddr != null) {
            wsrInfo = (WebServiceRefInfo) wsrInfoRefAddr.getContent();
        }
        // Make sure we found the WebServiceRefInfo object that contains the service-ref metadata.
        if (wsrInfo == null) {
            throw new Exception("Internal Error: Can not found the WebServiceRefInfo.");
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "Service Ref JNDI name: " + wsrInfo.getJndiName());
        }
        // Get the client metadata
        JaxWsClientMetaData declaredClientMetaData = wsrInfo.getClientMetaData(); // 146981
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            Tr.debug(tc, "declaredClientMetaData: " + declaredClientMetaData);
        JaxWsClientMetaData currentClientMetaData = JaxWsMetaDataManager.getJaxWsClientMetaData();
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            Tr.debug(tc, "currentClientMetaData: " + currentClientMetaData);
        // Fall back to the currently active client module's metadata.
        if (declaredClientMetaData == null) {
            declaredClientMetaData = currentClientMetaData;
        }
        // If we didn't find the ClientMetaData, we have a problem.
        if (declaredClientMetaData == null) {
            throw new IllegalStateException("Internal Error: Can not found the JaxWsClientMetaData");
        }
        mergeWebServicesBndInfo(wsrInfo, declaredClientMetaData);
        // 1. The Bus from client module which declares the serviceRef is used for locating WSDL or other usage.
        // 2. The classLoader from current client module is used for loading service interface or other related stub classes.
        // The scenario is that different modules could package the same stub classes in their own classpaths.
        Bus originalThreadBus = BusFactory.getThreadDefaultBus(false);
        try {
            BusFactory.setThreadDefaultBus(declaredClientMetaData.getClientBus());
            // Collect all of our module-specific service-ref metadata.
            TransientWebServiceRefInfo tInfo = new TransientWebServiceRefInfo(declaredClientMetaData, wsrInfo, declaredClientMetaData.getModuleMetaData().getAppContextClassLoader());
            Object instance = getInstance(tInfo, wsrInfo);
            return instance;
        } finally {
            // Always restore the thread's original bus.
            BusFactory.setThreadDefaultBus(originalThreadBus);
        }
    }
}
public class JavaParser {
    /**
     * ANTLR-generated rule method — do not edit by hand.
     *
     * Grammar rule: enumConstant : ( annotations )? Identifier ( arguments )? ( classBody )? ;
     */
    public final void enumConstant() throws RecognitionException {
        int enumConstant_StartIndex = input.index();
        try {
            // While backtracking, skip the rule if it was already parsed at this position.
            if (state.backtracking > 0 && alreadyParsedRule(input, 14)) {
                return;
            }
            // ( annotations )? Identifier ( arguments )? ( classBody )?
            {
                // Optional annotations, predicted by lookahead token type 58.
                int alt23 = 2;
                int LA23_0 = input.LA(1);
                if ((LA23_0 == 58)) {
                    alt23 = 1;
                }
                switch (alt23) {
                    case 1: {
                        pushFollow(FOLLOW_annotations_in_enumConstant491);
                        annotations();
                        state._fsp--;
                        if (state.failed)
                            return;
                    }
                        break;
                }
                match(input, Identifier, FOLLOW_Identifier_in_enumConstant494);
                if (state.failed)
                    return;
                // Optional constructor arguments, predicted by lookahead token type 36.
                int alt24 = 2;
                int LA24_0 = input.LA(1);
                if ((LA24_0 == 36)) {
                    alt24 = 1;
                }
                switch (alt24) {
                    case 1: {
                        pushFollow(FOLLOW_arguments_in_enumConstant497);
                        arguments();
                        state._fsp--;
                        if (state.failed)
                            return;
                    }
                        break;
                }
                // Optional class body, predicted by lookahead token type 121.
                int alt25 = 2;
                int LA25_0 = input.LA(1);
                if ((LA25_0 == 121)) {
                    alt25 = 1;
                }
                switch (alt25) {
                    case 1: {
                        pushFollow(FOLLOW_classBody_in_enumConstant502);
                        classBody();
                        state._fsp--;
                        if (state.failed)
                            return;
                    }
                        break;
                }
            }
        } catch (RecognitionException re) {
            reportError(re);
            recover(input, re);
        } finally {
            // do for sure before leaving
            if (state.backtracking > 0) {
                memoize(input, 14, enumConstant_StartIndex);
            }
        }
    }
}
public class MediaApi { /** * Get the attachment of the interaction * Get the attachment of the interaction specified in the documentId path parameter * @ param mediatype media - type of interaction ( required ) * @ param id id of interaction ( required ) * @ param documentId id of document to get ( required ) * @ return String * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public String attachments ( String mediatype , String id , String documentId ) throws ApiException { } }
ApiResponse < String > resp = attachmentsWithHttpInfo ( mediatype , id , documentId ) ; return resp . getData ( ) ;
public class FileCopyUtils {
    /**
     * Copy the contents of the given String to the given output Writer.
     * Closes the writer when done.
     *
     * @param in the String to copy from
     * @param out the Writer to copy to
     * @throws IOException in case of I/O errors
     */
    public static void copy(String in, Writer out) throws IOException {
        Assert.notNull(in, "No input String specified");
        Assert.notNull(out, "No Writer specified");
        try {
            out.write(in);
        } finally {
            try {
                out.close();
            } catch (IOException ex) {
                // Deliberately ignored: a close failure must not mask a write error.
            }
        }
    }
}
public class QuerySelectProducer { /** * Processes each row of the { @ link ResultSet } . * @ param subscriber * @ throws SQLException */ private void processRow ( Subscriber < ? super T > subscriber ) throws SQLException { } }
checkSubscription ( subscriber ) ; if ( ! keepGoing ) return ; if ( rs . next ( ) ) { log . trace ( "onNext" ) ; subscriber . onNext ( function . call ( rs ) ) ; } else keepGoing = false ;
public class DefaultTaskSet { /** * Removes all available elements from this task set . Any tasks that are blocked will not be * removed . Will cease draining if the thread is interrupted . */ public int drainTo ( Collection < ? super Task > c ) { } }
if ( c == this ) throw new IllegalArgumentException ( "cannot drain task set into itself" ) ; if ( c == null ) throw new NullPointerException ( "target collection must not be null" ) ; int count = 0 ; while ( true ) { try { Task t = this . poll ( 0 , TimeUnit . SECONDS ) ; if ( t == null ) break ; else c . add ( t ) ; } catch ( InterruptedException e ) { break ; } } return count ;
public class ParticleGenerator { /** * Set new point coordinates somewhere on screen and apply new direction * @ param position the point position to apply new values to */ void applyFreshParticleOnScreen ( @ NonNull final Scene scene , final int position ) { } }
final int w = scene . getWidth ( ) ; final int h = scene . getHeight ( ) ; if ( w == 0 || h == 0 ) { throw new IllegalStateException ( "Cannot generate particles if scene width or height is 0" ) ; } final double direction = Math . toRadians ( random . nextInt ( 360 ) ) ; final float dCos = ( float ) Math . cos ( direction ) ; final float dSin = ( float ) Math . sin ( direction ) ; final float x = random . nextInt ( w ) ; final float y = random . nextInt ( h ) ; final float speedFactor = newRandomIndividualParticleSpeedFactor ( ) ; final float radius = newRandomIndividualParticleRadius ( scene ) ; scene . setParticleData ( position , x , y , dCos , dSin , radius , speedFactor ) ;
public class RedisInner { /** * Reboot specified Redis node ( s ) . This operation requires write permission to the cache resource . There can be potential data loss . * @ param resourceGroupName The name of the resource group . * @ param name The name of the Redis cache . * @ param parameters Specifies which Redis node ( s ) to reboot . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < RedisForceRebootResponseInner > forceRebootAsync ( String resourceGroupName , String name , RedisRebootParameters parameters , final ServiceCallback < RedisForceRebootResponseInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( forceRebootWithServiceResponseAsync ( resourceGroupName , name , parameters ) , serviceCallback ) ;
public class SVGParser { /** * < path > element */ private void path ( Attributes attributes ) throws SVGParseException { } }
debug ( "<path>" ) ; if ( currentElement == null ) throw new SVGParseException ( "Invalid document. Root element must be <svg>" ) ; SVG . Path obj = new SVG . Path ( ) ; obj . document = svgDocument ; obj . parent = currentElement ; parseAttributesCore ( obj , attributes ) ; parseAttributesStyle ( obj , attributes ) ; parseAttributesTransform ( obj , attributes ) ; parseAttributesConditional ( obj , attributes ) ; parseAttributesPath ( obj , attributes ) ; currentElement . addChild ( obj ) ;
public class InetAddressPredicates {
    /**
     * Returns a {@link Predicate} which returns {@code true} if the given {@link InetAddress} is in the
     * range of a <a href="https://tools.ietf.org/html/rfc4632">Classless Inter-domain Routing (CIDR)</a>
     * block.
     *
     * @param cidr the CIDR notation of an address block, e.g. {@code 10.0.0.0/8},
     *             {@code 192.168.1.0/24}, {@code 1080:0:0:0:8:800:200C:4100/120}. The mask part may
     *             also be a dotted-quad subnet mask such as {@code 255.255.255.0}.
     * @throws IllegalArgumentException if {@code cidr} is not valid CIDR notation
     */
    public static Predicate<InetAddress> ofCidr(String cidr) {
        requireNonNull(cidr, "cidr");
        // Split "address/mask" at the slash; both halves are required.
        final int delim = cidr.indexOf('/');
        checkArgument(delim >= 0, "Invalid CIDR notation: %s", cidr);
        final InetAddress baseAddress;
        try {
            baseAddress = InetAddress.getByName(cidr.substring(0, delim));
        } catch (UnknownHostException e) {
            throw new IllegalArgumentException("Invalid CIDR notation: " + cidr, e);
        }
        final String subnetMask = cidr.substring(delim + 1);
        checkArgument(!subnetMask.isEmpty(), "Invalid CIDR notation: %s", cidr);
        final int maskBits;
        if (NetUtil.isValidIpV4Address(subnetMask)) {
            // The mask was given as a dotted quad (e.g. "255.255.255.0"); convert it to a
            // prefix length. The second argument adds 96, presumably so the same prefix also
            // matches the address in IPv4-mapped IPv6 form -- confirm against ofCidr(addr, v4, v6).
            maskBits = toMaskBits(subnetMask);
            return ofCidr(baseAddress, maskBits, maskBits + 96);
        }
        try {
            maskBits = Integer.parseInt(subnetMask);
        } catch (Exception e) {
            throw new IllegalArgumentException("Invalid CIDR notation: " + cidr, e);
        }
        return ofCidr(baseAddress, maskBits, maskBits);
    }
}
public class clusternode { /** * Use this API to add clusternode . */ public static base_response add ( nitro_service client , clusternode resource ) throws Exception { } }
clusternode addresource = new clusternode ( ) ; addresource . nodeid = resource . nodeid ; addresource . ipaddress = resource . ipaddress ; addresource . state = resource . state ; addresource . backplane = resource . backplane ; addresource . priority = resource . priority ; return addresource . add_resource ( client ) ;
public class DatabasesInner {
    /**
     * Retrieves the usages (most recent data) for the given database.
     *
     * @param resourceGroupName Name of an Azure resource group.
     * @param accountName Cosmos DB database account name.
     * @param databaseRid Cosmos DB database rid.
     * @param filter An OData filter expression that describes a subset of usages to return. The
     *        supported parameter is name.value (name of the metric, can have an or of multiple names).
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the List&lt;UsageInner&gt; object
     */
    public Observable<ServiceResponse<List<UsageInner>>> listUsagesWithServiceResponseAsync(String resourceGroupName, String accountName, String databaseRid, String filter) {
        // Validate all required parameters before issuing the request.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (accountName == null) {
            throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
        }
        if (databaseRid == null) {
            throw new IllegalArgumentException("Parameter databaseRid is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.listUsages(this.client.subscriptionId(), resourceGroupName, accountName, databaseRid, this.client.apiVersion(), filter, this.client.acceptLanguage(), this.client.userAgent())
                .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<List<UsageInner>>>>() {
                    @Override
                    public Observable<ServiceResponse<List<UsageInner>>> call(Response<ResponseBody> response) {
                        try {
                            // Unwrap the paged REST payload into a plain list for the caller.
                            ServiceResponse<PageImpl<UsageInner>> result = listUsagesDelegate(response);
                            List<UsageInner> items = null;
                            if (result.body() != null) {
                                items = result.body().items();
                            }
                            ServiceResponse<List<UsageInner>> clientResponse = new ServiceResponse<List<UsageInner>>(items, result.response());
                            return Observable.just(clientResponse);
                        } catch (Throwable t) {
                            // Surface deserialization failures through the observable, not the stack.
                            return Observable.error(t);
                        }
                    }
                });
    }
}
public class MpxjCreate { /** * Main method . * @ param args array of command line arguments */ public static void main ( String [ ] args ) { } }
try { if ( args . length != 1 ) { System . out . println ( "Usage: MpxCreate <output file name>" ) ; } else { create ( args [ 0 ] ) ; } } catch ( Exception ex ) { ex . printStackTrace ( System . out ) ; }
public class LoopBarView { /** * Initiate LoopBar with menu * @ param menuRes id for inflating { @ link Menu } */ public void setCategoriesAdapterFromMenu ( @ MenuRes int menuRes ) { } }
Menu menu = new MenuBuilder ( getContext ( ) ) ; new MenuInflater ( getContext ( ) ) . inflate ( menuRes , menu ) ; setCategoriesAdapterFromMenu ( menu ) ;
public class BeatFinder { /** * Helper method to check that we got the right size packet . * @ param packet a packet that has been received * @ param expectedLength the number of bytes we expect it to contain * @ param name the description of the packet in case we need to report issues with the length * @ return { @ code true } if enough bytes were received to process the packet */ private boolean isPacketLongEnough ( DatagramPacket packet , int expectedLength , String name ) { } }
final int length = packet . getLength ( ) ; if ( length < expectedLength ) { logger . warn ( "Ignoring too-short " + name + " packet; expecting " + expectedLength + " bytes and got " + length + "." ) ; return false ; } if ( length > expectedLength ) { logger . warn ( "Processing too-long " + name + " packet; expecting " + expectedLength + " bytes and got " + length + "." ) ; } return true ;
public class HashUtil {
    /**
     * BKDR string hash.
     * Folds each character into the accumulator with the multiplier 131 and
     * masks the sign bit so the result is always non-negative.
     *
     * @param str the string to hash
     * @return a non-negative hash value
     */
    public static int bkdrHash(String str) {
        final int seed = 131; // 31 131 1313 13131 131313 etc..
        int hash = 0;
        for (char c : str.toCharArray()) {
            hash = hash * seed + c;
        }
        return hash & 0x7FFFFFFF;
    }
}
public class PowerDecay { /** * Controls the rate early in time , but has a decreasing impact on the rate * returned as time goes forward . Larger values of & tau ; dampen the initial * rates returned , while lower values let the initial rates start higher . * @ param tau the early rate dampening parameter */ public void setTau ( double tau ) { } }
if ( tau <= 0 || Double . isInfinite ( tau ) || Double . isNaN ( tau ) ) throw new IllegalArgumentException ( "tau must be a positive constant, not " + tau ) ; this . tau = tau ;
public class UpdateNotificationSettingsRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( UpdateNotificationSettingsRequest updateNotificationSettingsRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( updateNotificationSettingsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateNotificationSettingsRequest . getHITTypeId ( ) , HITTYPEID_BINDING ) ; protocolMarshaller . marshall ( updateNotificationSettingsRequest . getNotification ( ) , NOTIFICATION_BINDING ) ; protocolMarshaller . marshall ( updateNotificationSettingsRequest . getActive ( ) , ACTIVE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class GridRecordMessageFilter {
    /**
     * Does this message header match this filter?
     * Refines the superclass match for record messages: updates/deletes and cache
     * updates only match records whose bookmark this filter is tracking; adds are
     * matched by hint when one is available, otherwise by the receive-all-adds policy.
     *
     * @param messageHeader The message header to check.
     * @return true if match, false if no match.
     */
    public boolean isFilterMatch(BaseMessageHeader messageHeader) {
        boolean bMatch = super.isFilterMatch(messageHeader);
        if (bMatch) {
            if (!(messageHeader instanceof RecordMessageHeader))
                return false; // Never
            RecordMessageHeader recMessageHeader = (RecordMessageHeader) messageHeader;
            int iRecordMessageType = recMessageHeader.getRecordMessageType();
            if ((iRecordMessageType == DBConstants.AFTER_UPDATE_TYPE) || ((iRecordMessageType == DBConstants.AFTER_DELETE_TYPE))) {
                // NOTE(review): Hashtable.contains() tests VALUES, not keys; this works only if
                // bookmarks are stored as values in m_htBookmarks -- confirm against where the
                // table is populated (containsKey may have been intended).
                if (!m_htBookmarks.contains(recMessageHeader.getBookmark(DBConstants.BOOKMARK_HANDLE)))
                    bMatch = false; // No match
                else
                    bMatch = true;
            } else if (iRecordMessageType == DBConstants.AFTER_ADD_TYPE) {
                if ((m_strSecondaryKey != null) && (m_objKeyData != null)) {
                    // If there is a hint, it won't take much processing power to check for a match
                    bMatch = recMessageHeader.isMatchHint(m_strSecondaryKey, m_objKeyData);
                } else {
                    // There is no hint, so you could potentially get a ton of add messages
                    bMatch = true; // What the heck, as long as I don't have to send it up, process this message.
                    for (int i = 0; (this.getMessageListener(i) != null); i++) {
                        if (this.getMessageListener(i) instanceof RemoteReceiveQueue) {
                            bMatch = m_bReceiveAllAdds; // Do I receive all the add messages?
                            break;
                        }
                    }
                }
            } else if (iRecordMessageType == DBConstants.CACHE_UPDATE_TYPE) {
                // Same bookmark-tracking rule as updates/deletes (see NOTE above).
                if (!m_htBookmarks.contains(recMessageHeader.getBookmark(DBConstants.BOOKMARK_HANDLE)))
                    bMatch = false; // No match
                else
                    bMatch = true;
            } else
                bMatch = false; // No match
        }
        return bMatch;
    }
}
public class SCSIResponseParser { /** * { @ inheritDoc } */ @ Override protected final int serializeBytes1to3 ( ) { } }
int line = status . value ( ) ; line |= response . value ( ) << Constants . ONE_BYTE_SHIFT ; if ( residualUnderflow ) { line |= Constants . RESIDUAL_UNDERFLOW_FLAG_MASK ; } if ( residualOverflow ) { line |= Constants . RESIDUAL_OVERFLOW_FLAG_MASK ; } if ( bidirectionalReadResidualUnderflow ) { line |= Constants . READ_RESIDUAL_UNDERFLOW_FLAG_MASK ; } if ( bidirectionalReadResidualOverflow ) { line |= Constants . READ_RESIDUAL_OVERFLOW_FLAG_MASK ; } return line ;
public class WorkWrapper { /** * Cancel */ protected void cancel ( ) { } }
if ( trace ) log . tracef ( "Cancel work: %s" , this ) ; ExecutionContext ctx = getWorkContext ( TransactionContext . class ) ; if ( ctx == null ) { ctx = getExecutionContext ( ) ; } if ( ctx != null ) { Xid xid = ctx . getXid ( ) ; if ( xid != null ) { workManager . getXATerminator ( ) . cancelWork ( work , xid ) ; } } if ( trace ) log . tracef ( "Canceled work: %s" , this ) ;
public class CustomTopicXMLValidator {
    /**
     * Checks that the topic's root element matches the topic type. Normal topics must use
     * the standard topic root; special topic types (identified by server-configured tags)
     * each require a specific DocBook root element.
     *
     * @param serverSettings the server settings providing the special-topic tag ids
     * @param topic The topic to validate the doc against.
     * @param doc The topic's XML DOM Document.
     * @return A list of error messages for any invalid content found, otherwise an empty list.
     */
    public static List<String> checkTopicRootElement(final ServerSettingsWrapper serverSettings, final BaseTopicWrapper<?> topic, final Document doc) {
        final List<String> xmlErrors = new ArrayList<String>();
        final ServerEntitiesWrapper serverEntities = serverSettings.getEntities();
        if (isTopicANormalTopic(topic, serverSettings)) {
            // Ordinary topics must use the standard topic root element.
            if (!doc.getDocumentElement().getNodeName().equals(DocBookUtilities.TOPIC_ROOT_NODE_NAME)) {
                xmlErrors.add("Topics must be a <" + DocBookUtilities.TOPIC_ROOT_NODE_NAME + ">.");
            }
        } else {
            // Special topic types, checked in priority order by their identifying tag.
            if (topic.hasTag(serverEntities.getRevisionHistoryTagId())) {
                if (!doc.getDocumentElement().getNodeName().equals("appendix")) {
                    xmlErrors.add("Revision History topics must be an <appendix>.");
                }
                // Check to make sure that a revhistory entry exists
                final NodeList revHistoryList = doc.getElementsByTagName("revhistory");
                if (revHistoryList.getLength() == 0) {
                    xmlErrors.add("No <revhistory> element found. A <revhistory> must exist for Revision Histories.");
                }
            } else if (topic.hasTag(serverEntities.getLegalNoticeTagId())) {
                if (!doc.getDocumentElement().getNodeName().equals("legalnotice")) {
                    xmlErrors.add("Legal Notice topics must be a <legalnotice>.");
                }
            } else if (topic.hasTag(serverEntities.getAuthorGroupTagId())) {
                if (!doc.getDocumentElement().getNodeName().equals("authorgroup")) {
                    xmlErrors.add("Author Group topics must be an <authorgroup>.");
                }
            } else if (topic.hasTag(serverEntities.getAbstractTagId())) {
                if (!doc.getDocumentElement().getNodeName().equals("abstract")) {
                    xmlErrors.add("Abstract topics must be an <abstract>.");
                }
            } else if (topic.hasTag(serverEntities.getInfoTagId())) {
                // Info topics use <info> in DocBook 5.0 but <sectioninfo> in earlier formats.
                if (DocBookVersion.DOCBOOK_50.getId().equals(topic.getXmlFormat())) {
                    if (!doc.getDocumentElement().getNodeName().equals("info")) {
                        xmlErrors.add("Info topics must be an <info>.");
                    }
                } else {
                    if (!doc.getDocumentElement().getNodeName().equals("sectioninfo")) {
                        xmlErrors.add("Info topics must be a <sectioninfo>.");
                    }
                }
            }
        }
        return xmlErrors;
    }
}
public class FaxCreator { /** * Add the requested post parameters to the Request . * @ param request Request to add post params to */ private void addPostParams ( final Request request ) { } }
if ( to != null ) { request . addPostParam ( "To" , to ) ; } if ( mediaUrl != null ) { request . addPostParam ( "MediaUrl" , mediaUrl . toString ( ) ) ; } if ( quality != null ) { request . addPostParam ( "Quality" , quality . toString ( ) ) ; } if ( statusCallback != null ) { request . addPostParam ( "StatusCallback" , statusCallback . toString ( ) ) ; } if ( from != null ) { request . addPostParam ( "From" , from ) ; } if ( sipAuthUsername != null ) { request . addPostParam ( "SipAuthUsername" , sipAuthUsername ) ; } if ( sipAuthPassword != null ) { request . addPostParam ( "SipAuthPassword" , sipAuthPassword ) ; } if ( storeMedia != null ) { request . addPostParam ( "StoreMedia" , storeMedia . toString ( ) ) ; } if ( ttl != null ) { request . addPostParam ( "Ttl" , ttl . toString ( ) ) ; }
public class ImplMedianHistogramInner {
    /**
     * Applies a median image filter using a sliding 256-bin histogram, writing only the
     * inner region (a border of {@code radius} pixels is left untouched).
     * The {@code CONCURRENT_BELOW}/{@code CONCURRENT_ABOVE} comments are directives for
     * BoofCV's code generator that produces the multi-threaded variant -- do not edit them.
     *
     * @param input Input image. Not modified.
     * @param output Filtered output image. Modified.
     * @param radius Size of the filter region.
     * @param work (Optional) used to create local workspace
     */
    public static void process(GrayU8 input, GrayU8 output, int radius, @Nullable IWorkArrays work) {
        if (work == null)
            work = new IWorkArrays();
        work.reset(256);
        final IWorkArrays _work = work;
        // Width of the square filter window.
        int w = 2 * radius + 1;
        // sanity check to make sure the image isn't too small to be processed by this algorithm
        if (input.width < w || input.height < w)
            return;
        // defines what the median is. technically this is an approximation because if even it's the ave
        // of the two elements in the middle. I'm not aware of libraries which actually do this.
        int threshold = (w * w) / 2 + 1;
        // CONCURRENT_BELOW BoofConcurrency.loopBlocks(radius, output.height - radius, w, (y0, y1) -> {
        final int y0 = radius, y1 = input.height - radius;
        int[] histogram = _work.pop();
        for (int y = y0; y < y1; y++) {
            int seed = input.startIndex + (y - radius) * input.stride;
            Arrays.fill(histogram, 0);
            // compute the median value for the first x component and initialize the system
            for (int i = 0; i < w; i++) {
                int idx = seed + i * input.stride;
                int end = idx + w;
                while (idx < end) {
                    histogram[(input.data[idx++] & 0xFF)]++;
                }
            }
            // Compute the median value
            int count = 0, median = 0;
            while (true) {
                count += histogram[median];
                if (count >= threshold)
                    break;
                median++;
            }
            output.data[output.startIndex + y * output.stride + radius] = (byte) median;
            // remove the left most pixel from the histogram
            count += removeSide(input.data, input.stride, w, histogram, seed, median);
            for (int x = radius + 1; x < input.width - radius; x++) {
                seed = input.startIndex + (y - radius) * input.stride + (x - radius);
                // add the right most pixels to the histogram
                count += addSide(input.data, input.stride, w, histogram, seed + w - 1, median);
                // find the median, using the previous solution as a starting point
                if (count >= threshold) {
                    while (count >= threshold) {
                        count -= histogram[median--];
                    }
                    median += 1;
                    count += histogram[median];
                } else {
                    while (count < threshold) {
                        median += 1;
                        count += histogram[median];
                    }
                }
                output.data[output.startIndex + y * output.stride + x] = (byte) median;
                // remove the left most pixels from the histogram
                count += removeSide(input.data, input.stride, w, histogram, seed, median);
            }
        }
        // CONCURRENT_ABOVE }});
    }
}
public class ClosureCodingConvention {
    /**
     * {@inheritDoc}
     *
     * <p>Understands several different inheritance patterns that occur in
     * Google code (various uses of {@code inherits} and {@code mixin}).
     * Returns null when the call is not a recognized class-defining call or
     * when the operands are not plain qualified names.
     */
    @Override
    public SubclassRelationship getClassesDefinedByCall(Node callNode) {
        // Defer to the base convention first; only handle calls it did not recognize.
        SubclassRelationship relationship = super.getClassesDefinedByCall(callNode);
        if (relationship != null) {
            return relationship;
        }
        Node callName = callNode.getFirstChild();
        SubclassType type = typeofClassDefiningName(callName);
        if (type != null) {
            Node subclass = null;
            Node superclass = callNode.getLastChild();
            // There are four possible syntaxes for a class-defining method:
            // goog.inherits(SubClass, SuperClass)
            // goog$inherits(SubClass, SuperClass)
            // goog.mixin(SubClass.prototype, SuperClass.prototype)
            // goog$mixin(SubClass.prototype, SuperClass.prototype)
            if (callNode.hasXChildren(3)) {
                // goog.inherits(SubClass, SuperClass)
                // (callee + two arguments; the first argument is the subclass)
                subclass = callName.getNext();
            } else {
                return null;
            }
            if (type == SubclassType.MIXIN) {
                // Only consider mixins that mix two prototypes as related to
                // inheritance.
                if (!endsWithPrototype(superclass)) {
                    return null;
                }
                if (!endsWithPrototype(subclass)) {
                    return null;
                }
                // Strip off the prototype from the name.
                subclass = subclass.getFirstChild();
                superclass = superclass.getFirstChild();
            }
            // bail out if either of the side of the "inherits"
            // isn't a real class name. This prevents us from
            // doing something weird in cases like:
            // goog.inherits(MySubClass, cond ? SuperClass1 : BaseClass2)
            if (subclass != null && subclass.isUnscopedQualifiedName() && superclass.isUnscopedQualifiedName()) {
                return new SubclassRelationship(type, subclass, superclass);
            }
        }
        return null;
    }
}
public class StaticCATConsumer {
    /**
     * Deletes a set of messages that are currently locked by the server.
     * Wire format of the request:
     *   BIT16 ConnectionObjectId
     *   BIT16 ConsumerSessionId
     *   BIT32 TransactionId
     *   BIT32 ArrayCount
     *   BIT32[] MsgIds
     *
     * @param request the buffer containing the request payload
     * @param conversation the conversation the request arrived on
     * @param requestNumber the request sequence number, echoed in the reply
     * @param allocatedFromBufferPool whether the buffer must be returned to the pool
     * @param partOfExchange whether the client expects a reply to this request
     */
    static void rcvDeleteSet(CommsServerByteBuffer request, Conversation conversation, int requestNumber, boolean allocatedFromBufferPool, boolean partOfExchange) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "rcvDeleteSet", new Object[] { request, conversation, "" + requestNumber });
        ServerLinkLevelState linkState = (ServerLinkLevelState) conversation.getLinkLevelAttachment();
        final boolean optimizedTx = CommsUtils.requiresOptimizedTransaction(conversation);
        short connectionObjectId = request.getShort(); // BIT16 ConnectionObjectId
        short consumerObjectId = request.getShort(); // BIT16 ConsumerSessionId
        int transactionId = request.getSITransactionId(connectionObjectId, linkState, optimizedTx);
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            SibTr.debug(tc, "connectionObjectId", connectionObjectId);
            SibTr.debug(tc, "consumerObjectId", consumerObjectId);
            SibTr.debug(tc, "transactionId", transactionId);
        }
        SIMessageHandle[] siMsgHandles = request.getSIMessageHandles();
        // Look up the consumer registered under this session id on the conversation.
        CATMainConsumer mainConsumer = (CATMainConsumer) ((ConversationState) conversation.getAttachment()).getObject(consumerObjectId);
        // If the transaction id is not null then retrieve the
        // transaction object from the table in the link level
        mainConsumer.deleteSet(requestNumber, siMsgHandles, transactionId, partOfExchange); // f187521.2.1, F219476.2
        // Return the buffer to the pool if it was allocated from it.
        request.release(allocatedFromBufferPool);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "rcvDeleteSet");
    }
}
public class LogRef {
    /**
     * Returns a human readable name for the bundle that <code>bc</code> represents.
     * The real bundle name cannot be read (it would require AdminPermission), so a
     * synthetic "bid#&lt;id&gt;" name is built and padded to at least 12 characters.
     *
     * @return Name of the bundle that uses this wrapper (at least 12 characters).
     */
    private String getBundleName() {
        StringBuffer bundleName = new StringBuffer(24);
        // We can't get bundle-name since it requires AdminPermission.
        // bundleName.append((String) bc.getBundle().getHeaders().get("Bundle-Name"));
        // If name was not found use the Bid as name.
        if (bundleName.length() <= 0) {
            bundleName.append("bid#");
            bundleName.append(String.valueOf(bundleId));
        }
        if (bundleName.length() < 12) {
            // NOTE(review): only a single space is appended before setLength(12); if the
            // name is still shorter than 12, StringBuffer.setLength pads with '\u0000',
            // not spaces. The source may originally have appended a longer run of spaces
            // (whitespace possibly collapsed by extraction) -- confirm upstream.
            bundleName.append(" ");
            bundleName.setLength(12);
        }
        return bundleName.toString();
    }
}
public class ConnectionManager { /** * Finds a valid player number that is currently visible but which is different from the one specified , so it can * be used as the source player for a query being sent to the specified one . If the virtual CDJ is running on an * acceptable player number ( which must be 1-4 to request metadata from an actual CDJ , but can be anything if we * are talking to rekordbox ) , uses that , since it will always be safe . Otherwise , tries to borrow the player number * of another actual CDJ on the network , but we can ' t do that if the player we want to impersonate has mounted * a track from the player that we want to talk to . * @ param targetPlayer the player to which a metadata query is being sent * @ return some other currently active player number , ideally not a real player , but sometimes we have to * @ throws IllegalStateException if there is no other player number available to use */ private int chooseAskingPlayerNumber ( int targetPlayer ) { } }
final int fakeDevice = VirtualCdj . getInstance ( ) . getDeviceNumber ( ) ; if ( ( targetPlayer > 15 ) || ( fakeDevice >= 1 && fakeDevice <= 4 ) ) { return fakeDevice ; } for ( DeviceAnnouncement candidate : DeviceFinder . getInstance ( ) . getCurrentDevices ( ) ) { final int realDevice = candidate . getNumber ( ) ; if ( realDevice != targetPlayer && realDevice >= 1 && realDevice <= 4 ) { final DeviceUpdate lastUpdate = VirtualCdj . getInstance ( ) . getLatestStatusFor ( realDevice ) ; if ( lastUpdate instanceof CdjStatus && ( ( CdjStatus ) lastUpdate ) . getTrackSourcePlayer ( ) != targetPlayer ) { return candidate . getNumber ( ) ; } } } throw new IllegalStateException ( "No player number available to query player " + targetPlayer + ". If such a player is present on the network, it must be using Link to play a track from " + "our target player, so we can't steal its channel number." ) ;
public class XmRequestContextInterceptor { /** * { @ inheritDoc } */ @ Override public boolean preHandle ( HttpServletRequest request , HttpServletResponse response , Object handler ) throws IOException { } }
LOGGER . debug ( "Init XM request context for {} interceptor" , requestSourceType ) ; getXmPrivilegedRequestContext ( ) . putValue ( contextRequestSourceKey , requestSourceType ) ; return true ;
public class LssClient { /** * Delete your live presets by live preset name . * @ param name Live preset name . * @ return the response */ public DeletePresetResponse deletePreset ( String name ) { } }
DeletePresetRequest request = new DeletePresetRequest ( ) ; request . setName ( name ) ; return deletePreset ( request ) ;
public class DefaultGroovyMethods { /** * Creates a spreadable map from this array . * @ param self an object array * @ return a newly created SpreadMap * @ see groovy . lang . SpreadMap # SpreadMap ( java . lang . Object [ ] ) * @ see # toSpreadMap ( java . util . Map ) * @ since 1.0 */ public static SpreadMap toSpreadMap ( Object [ ] self ) { } }
if ( self == null ) throw new GroovyRuntimeException ( "Fail to convert Object[] to SpreadMap, because it is null." ) ; else if ( self . length % 2 != 0 ) throw new GroovyRuntimeException ( "Fail to convert Object[] to SpreadMap, because it's size is not even." ) ; else return new SpreadMap ( self ) ;
public class Workflow { /** * Internal use only - called by the processing engine * @ param r * response to be put into the response map . */ public void putResponse ( Response < ? > r ) { } }
synchronized ( responseMap ) { List < Response < ? > > l = responseMap . get ( r . getCorrelationId ( ) ) ; if ( l == null ) { l = new SortedResponseList ( ) ; responseMap . put ( r . getCorrelationId ( ) , l ) ; } l . add ( r ) ; }
public class DefaultGroovyMethods { /** * Iterates through the Iterator transforming items using the supplied closure * and collecting any non - null results . * @ param self an Iterator * @ param filteringTransform a Closure that should return either a non - null transformed value or null for items which should be discarded * @ return the list of non - null transformed values * @ since 2.5.0 */ public static < T , U > Collection < T > findResults ( Iterator < U > self , @ ClosureParams ( FirstParam . FirstGenericType . class ) Closure < T > filteringTransform ) { } }
List < T > result = new ArrayList < T > ( ) ; while ( self . hasNext ( ) ) { U value = self . next ( ) ; T transformed = filteringTransform . call ( value ) ; if ( transformed != null ) { result . add ( transformed ) ; } } return result ;
public class RecurrencePickerDialog {
    /**
     * Spinner selection callback. Handles both the frequency spinner and the
     * end-mode spinner; for the end spinner it also clamps the occurrence count
     * and toggles which end-related widgets are visible.
     */
    @Override
    public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
        if (parent == mFreqSpinner) {
            mModel.freq = position;
        } else if (parent == mEndSpinner) {
            switch (position) {
                case RecurrenceModel.END_NEVER:
                    mModel.end = RecurrenceModel.END_NEVER;
                    break;
                case RecurrenceModel.END_BY_DATE:
                    mModel.end = RecurrenceModel.END_BY_DATE;
                    break;
                case RecurrenceModel.END_BY_COUNT:
                    mModel.end = RecurrenceModel.END_BY_COUNT;
                    // Clamp the occurrence count into [1, COUNT_MAX].
                    if (mModel.endCount <= 1) {
                        mModel.endCount = 1;
                    } else if (mModel.endCount > COUNT_MAX) {
                        mModel.endCount = COUNT_MAX;
                    }
                    updateEndCountText();
                    break;
            }
            // Show only the widgets relevant to the chosen end mode.
            mEndCount.setVisibility(mModel.end == RecurrenceModel.END_BY_COUNT ? View.VISIBLE : View.GONE);
            mEndDateTextView.setVisibility(mModel.end == RecurrenceModel.END_BY_DATE ? View.VISIBLE : View.GONE);
            mPostEndCount.setVisibility(mModel.end == RecurrenceModel.END_BY_COUNT && !mHidePostEndCount ? View.VISIBLE : View.GONE);
        }
        updateDialog();
    }
}
public class AtlasClient {
    /**
     * Delete the specified entities from the repository.
     *
     * @param guids guids of entities to delete
     * @return List of entity ids updated/deleted
     * @throws AtlasServiceException if the server call fails after retries
     */
    public EntityResult deleteEntities(final String... guids) throws AtlasServiceException {
        LOG.debug("Deleting entities: {}", guids);
        // The call is retried; the ResourceCreator rebuilds a fresh request each attempt.
        JSONObject jsonResponse = callAPIWithRetries(API.DELETE_ENTITIES, null, new ResourceCreator() {
            @Override
            public WebResource createResource() {
                API api = API.DELETE_ENTITIES;
                WebResource resource = getResource(api);
                // Each guid is passed as a repeated query parameter on the request.
                for (String guid : guids) {
                    resource = resource.queryParam(GUID.toLowerCase(), guid);
                }
                return resource;
            }
        });
        EntityResult results = extractEntityResult(jsonResponse);
        LOG.debug("Delete entities returned results: {}", results);
        return results;
    }
}
public class CompilationUnit { /** * Configures its debugging mode and classloader classpath from a given compiler configuration . * This cannot be done more than once due to limitations in { @ link java . net . URLClassLoader URLClassLoader } . */ public void configure ( CompilerConfiguration configuration ) { } }
super . configure ( configuration ) ; this . debug = configuration . getDebug ( ) ; if ( ! this . configured && this . classLoader instanceof GroovyClassLoader ) { appendCompilerConfigurationClasspathToClassLoader ( configuration , ( GroovyClassLoader ) this . classLoader ) ; } this . configured = true ;
public class DeregisterElasticIpRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeregisterElasticIpRequest deregisterElasticIpRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( deregisterElasticIpRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deregisterElasticIpRequest . getElasticIp ( ) , ELASTICIP_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class AntXmlReport { /** * Apply filters to a method name . * @ param methodName */ private String applyFilters ( String methodName ) { } }
if ( filters . isEmpty ( ) ) { return methodName ; } Reader in = new StringReader ( methodName ) ; for ( TokenFilter tf : filters ) { in = tf . chain ( in ) ; } try { return CharStreams . toString ( in ) ; } catch ( IOException e ) { junit4 . log ( "Could not apply filters to " + methodName + ": " + Throwables . getStackTraceAsString ( e ) , Project . MSG_WARN ) ; return methodName ; }
public class GenericHibernateDao {
    /**
     * Returns a list of entity objects that have a collection named <code>fieldName</code>,
     * which contains the passed <code>subElement</code>. This can e.g. be used to return
     * all applications that contain a certain layer.
     *
     * @param fieldName The name of the collection field
     * @param subElement The element that should be contained in the collection
     * @param criterion Additional criterions to apply (optional)
     * @return The list of objects
     * @throws IllegalArgumentException if the entity class has no matching collection field
     */
    @SuppressWarnings("unchecked")
    public List<E> findAllWithCollectionContaining(String fieldName, PersistentObject subElement, Criterion... criterion) {
        final Class<? extends PersistentObject> subElementType = subElement.getClass();
        // Verify up front that entityClass really declares a collection field with this name
        // and element type (the final boolean flag's meaning is defined by EntityUtil --
        // presumably whether to also search superclasses; confirm there).
        final boolean isCollectionField = EntityUtil.isCollectionField(entityClass, fieldName, subElementType, true);
        if (!isCollectionField) {
            String errorMsg = String.format("There is no collection field '%s' with element type '%s' in the type '%s'", fieldName, subElementType.getName(), entityClass.getName());
            throw new IllegalArgumentException(errorMsg);
        }
        Criteria criteria = createDistinctRootEntityCriteria(criterion);
        // Join the collection and restrict on the contained element's id.
        criteria.createAlias(fieldName, "sub");
        criteria.add(Restrictions.eq("sub.id", subElement.getId()));
        return (List<E>) criteria.list();
    }
}
public class AliPayApi { /** * 统一收单线下交易预创建 适用于 : 扫码支付等 * @ param model * { AlipayTradePrecreateModel } * @ param notifyUrl * 异步通知URL * @ return { AlipayTradePrecreateResponse } * @ throws { AlipayApiException } */ public static AlipayTradePrecreateResponse tradePrecreatePayToResponse ( AlipayTradePrecreateModel model , String notifyUrl ) throws AlipayApiException { } }
AlipayTradePrecreateRequest request = new AlipayTradePrecreateRequest ( ) ; request . setBizModel ( model ) ; request . setNotifyUrl ( notifyUrl ) ; return AliPayApiConfigKit . getAliPayApiConfig ( ) . getAlipayClient ( ) . execute ( request ) ;
public class CompleteLayerUploadRequest { /** * The < code > sha256 < / code > digest of the image layer . * @ param layerDigests * The < code > sha256 < / code > digest of the image layer . */ public void setLayerDigests ( java . util . Collection < String > layerDigests ) { } }
if ( layerDigests == null ) { this . layerDigests = null ; return ; } this . layerDigests = new java . util . ArrayList < String > ( layerDigests ) ;