signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class FragmentSelectorMarshaller { /** * Marshall the given parameter object . */
public void marshall ( FragmentSelector fragmentSelector , ProtocolMarshaller protocolMarshaller ) { } } | if ( fragmentSelector == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( fragmentSelector . getFragmentSelectorType ( ) , FRAGMENTSELECTORTYPE_BINDING ) ; protocolMarshaller . marshall ( fragmentSelector . getTimestampRange ( ) , TIMESTAMPRANGE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class PairTable { /** * Rebuilds to a larger size . NumItems and validBits remain unchanged .
* @ param newLgSizeInts the new size
* @ return a larger PairTable */
PairTable rebuild ( final int newLgSizeInts ) { } } | checkLgSizeInts ( newLgSizeInts ) ; final int newSize = 1 << newLgSizeInts ; final int oldSize = 1 << lgSizeInts ; rtAssert ( newSize > numPairs ) ; final int [ ] oldSlotsArr = slotsArr ; slotsArr = new int [ newSize ] ; Arrays . fill ( slotsArr , - 1 ) ; lgSizeInts = newLgSizeInts ; for ( int i = 0 ; i < oldSize ; i ++ ) { final int item = oldSlotsArr [ i ] ; if ( item != - 1 ) { mustInsert ( this , item ) ; } } return this ; |
public class MetadataVersion {

    /**
     * Returns the size of the smallest array.
     *
     * @param arr1 the first array
     * @param arr2 the second array
     * @return the size of the smallest array
     */
    private int getLeastCommonArrayLength(String[] arr1, String[] arr2) {
        // Math.min is clearer than the hand-rolled ternary and behaves identically for ints.
        return Math.min(arr1.length, arr2.length);
    }
}
public class PerformanceTarget { /** * Gets the spendTargetType value for this PerformanceTarget .
* @ return spendTargetType * < span class = " constraint Selectable " > This field can be selected
* using the value " SpendTargetType " . < / span > < span class = " constraint Filterable " > This
* field can be filtered on . < / span > */
public com . google . api . ads . adwords . axis . v201809 . cm . SpendTargetType getSpendTargetType ( ) { } } | return spendTargetType ; |
public class AbstractDecorator { /** * Adds a new listener , which should be notified , when an area is modified by the dialog .
* @ param listener
* The listener , which should be added , as an instance of the type { @ link AreaListener } .
* The listener may not be null */
public final void addAreaListener ( @ NonNull final AreaListener listener ) { } } | Condition . INSTANCE . ensureNotNull ( listener , "The listener may not be null" ) ; this . areaListeners . add ( listener ) ; |
public class MutableFst { /** * Make a deep copy of the given FST . The symbol tables are " effectively deep copied " meaning that they will
* take advantage of any immutable tables ( using the UnionSymbolTable ) to avoid doing a large deep copy .
* @ param fst
* @ return */
public static MutableFst copyFrom ( Fst fst ) { } } | MutableFst copy = emptyWithCopyOfSymbols ( fst ) ; // build up states
for ( int i = 0 ; i < fst . getStateCount ( ) ; i ++ ) { State source = fst . getState ( i ) ; MutableState target = new MutableState ( source . getArcCount ( ) ) ; target . setFinalWeight ( source . getFinalWeight ( ) ) ; copy . setState ( i , target ) ; } // build arcs now that we have target state refs
for ( int i = 0 ; i < fst . getStateCount ( ) ; i ++ ) { State source = fst . getState ( i ) ; MutableState target = copy . getState ( i ) ; for ( int j = 0 ; j < source . getArcCount ( ) ; j ++ ) { Arc sarc = source . getArc ( j ) ; MutableState nextTargetState = copy . getState ( sarc . getNextState ( ) . getId ( ) ) ; MutableArc tarc = new MutableArc ( sarc . getIlabel ( ) , sarc . getOlabel ( ) , sarc . getWeight ( ) , nextTargetState ) ; target . addArc ( tarc ) ; } } MutableState newStart = copy . getState ( fst . getStartState ( ) . getId ( ) ) ; copy . setStart ( newStart ) ; return copy ; |
public class Nfs3 { /** * / * ( non - Javadoc )
* @ see com . emc . ecs . nfsclient . nfs . Nfs # getReadlink ( com . emc . ecs . nfsclient . nfs . NfsReadlinkRequest ) */
public Nfs3ReadlinkResponse getReadlink ( NfsReadlinkRequest request ) throws IOException { } } | Nfs3ReadlinkResponse response = new Nfs3ReadlinkResponse ( ) ; _rpcWrapper . callRpcNaked ( request , response ) ; return response ; |
public class JsonHash { /** * put with State .
* @ param key
* @ param value
* @ param state
* @ return The instance of the value for the given key
* @ deprecated { @ link State } is confuse the users . replace to { @ link Type } in about { @ link JsonHash } . since 1.4.12. */
@ Deprecated public Object put ( String key , Object value , State state ) { } } | return put ( key , value , Type . from ( state ) ) ; |
public class BaseValidator { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public boolean validateSF ( SF sf , DiagnosticChain diagnostics , Map < Object , Object > context ) { } } | return validate_EveryDefaultConstraint ( sf , diagnostics , context ) ; |
public class Type1Font { /** * Outputs to the writer the font dictionaries and streams .
* @ param writer the writer for this document
* @ param ref the font indirect reference
* @ param params several parameters that depend on the font type
* @ throws IOException on error
* @ throws DocumentException error in generating the object */
void writeFont ( PdfWriter writer , PdfIndirectReference ref , Object params [ ] ) throws DocumentException , IOException { } } | int firstChar = ( ( Integer ) params [ 0 ] ) . intValue ( ) ; int lastChar = ( ( Integer ) params [ 1 ] ) . intValue ( ) ; byte shortTag [ ] = ( byte [ ] ) params [ 2 ] ; boolean subsetp = ( ( Boolean ) params [ 3 ] ) . booleanValue ( ) && subset ; if ( ! subsetp ) { firstChar = 0 ; lastChar = shortTag . length - 1 ; for ( int k = 0 ; k < shortTag . length ; ++ k ) shortTag [ k ] = 1 ; } PdfIndirectReference ind_font = null ; PdfObject pobj = null ; PdfIndirectObject obj = null ; pobj = getFullFontStream ( ) ; if ( pobj != null ) { obj = writer . addToBody ( pobj ) ; ind_font = obj . getIndirectReference ( ) ; } pobj = getFontDescriptor ( ind_font ) ; if ( pobj != null ) { obj = writer . addToBody ( pobj ) ; ind_font = obj . getIndirectReference ( ) ; } pobj = getFontBaseType ( ind_font , firstChar , lastChar , shortTag ) ; writer . addToBody ( pobj , ref ) ; |
public class ContentUriChecker { /** * Analyze path internal .
* @ param < L > the generic type
* @ param input the input
* @ param listener the listener */
private < L extends UriBaseListener > void analyzePathInternal ( final String input , L listener ) { } } | pathSegmentIndex = - 1 ; walker . walk ( listener , preparePath ( input ) . value0 ) ; |
public class JoinProducerBase { /** * Completed all criteria : Kill the watchdog and inform the site . */
protected void setJoinComplete ( SiteProcedureConnection siteConnection , Map < String , Map < Integer , Pair < Long , Long > > > exportSequenceNumbers , Map < Integer , Long > drSequenceNumbers , Map < Integer , Map < Integer , Map < Integer , DRSiteDrIdTracker > > > allConsumerSiteTrackers , boolean requireExistingSequenceNumbers , long clusterCreateTime ) { } } | siteConnection . setRejoinComplete ( m_completionAction , exportSequenceNumbers , drSequenceNumbers , allConsumerSiteTrackers , requireExistingSequenceNumbers , clusterCreateTime ) ; |
public class ProtocolBuffers { /** * Parses protocol buffer content from an input stream ( closing the input stream ) into a protocol
* buffer message .
* @ param < T > destination message type
* @ param messageClass destination message class that has a { @ code parseFrom ( InputStream ) } public
* static method
* @ return new instance of the parsed destination message class */
public static < T extends MessageLite > T parseAndClose ( InputStream inputStream , Class < T > messageClass ) throws IOException { } } | try { Method newBuilder = messageClass . getDeclaredMethod ( "parseFrom" , InputStream . class ) ; return messageClass . cast ( newBuilder . invoke ( null , inputStream ) ) ; } catch ( Exception e ) { Throwables . propagateIfPossible ( e , IOException . class ) ; IOException io = new IOException ( "Error parsing message of type " + messageClass ) ; io . initCause ( e ) ; throw io ; } finally { inputStream . close ( ) ; } |
public class PatternMatchingSupport { /** * Returns true only if the value matches the regular expression
* only once and exactly .
* @ param valstring value that may match the expression
* @ param regexp regular expression
* @ return true if val matches regular expression regexp */
public static boolean valueMatchesRegularExpression ( String val , String regexp ) { } } | Pattern p = cache . get ( regexp ) ; if ( p == null ) { p = Pattern . compile ( regexp ) ; cache . put ( regexp , p ) ; } return valueMatchesRegularExpression ( val , p ) ; |
public class CmsXmlContentValueSequence { /** * Adds a value element of the given type
* at the selected index to the XML content document . < p >
* @ param cms the current users OpenCms context
* @ param type the type to add
* @ param index the index where to add the new value element
* @ return the added XML content value element
* @ see CmsXmlContent # addValue ( CmsObject , String , Locale , int )
* @ see # addValue ( CmsObject , String , int )
* @ see # addValue ( CmsObject , int ) */
public I_CmsXmlContentValue addValue ( CmsObject cms , I_CmsXmlSchemaType type , int index ) { } } | String xpath = CmsXmlUtils . concatXpath ( CmsXmlUtils . removeLastXpathElement ( getPath ( ) ) , type . getName ( ) ) ; return addValue ( cms , xpath , index ) ; |
public class SegmentAggregator { /** * Attempts to reconcile data and attributes for the given AggregatedAppendOperation . Since Append Operations can be partially
* flushed , reconciliation may be for the full operation or for a part of it .
* @ param op The AggregatedAppendOperation to reconcile .
* @ param storageInfo The current state of the Segment in Storage .
* @ param timer Timer for the operation .
* @ return A CompletableFuture containing a FlushResult with the number of bytes reconciled , or failed with a ReconciliationFailureException ,
* if the operation cannot be reconciled , based on the in - memory metadata or the current state of the Segment in Storage . */
private CompletableFuture < WriterFlushResult > reconcileAppendOperation ( AggregatedAppendOperation op , SegmentProperties storageInfo , TimeoutTimer timer ) { } } | CompletableFuture < Boolean > reconcileResult ; WriterFlushResult flushResult = new WriterFlushResult ( ) ; if ( op . getLength ( ) > 0 ) { // This operation has data . Reconcile that first .
reconcileResult = reconcileData ( op , storageInfo , timer ) . thenApply ( reconciledBytes -> { flushResult . withFlushedBytes ( reconciledBytes ) ; return reconciledBytes >= op . getLength ( ) && op . getLastStreamSegmentOffset ( ) <= storageInfo . getLength ( ) ; } ) ; } else { // No data to reconcile , so we consider this part done .
reconcileResult = CompletableFuture . completedFuture ( true ) ; } if ( ! op . attributes . isEmpty ( ) ) { // This operation has Attributes . Reconcile them , but only if the data reconciliation succeeded for the whole operation .
reconcileResult = reconcileResult . thenComposeAsync ( fullyReconciledData -> { if ( fullyReconciledData ) { return reconcileAttributes ( op , timer ) . thenApply ( v -> { flushResult . withFlushedAttributes ( op . attributes . size ( ) ) ; return fullyReconciledData ; } ) ; } else { return CompletableFuture . completedFuture ( fullyReconciledData ) ; } } , this . executor ) ; } return reconcileResult . thenApplyAsync ( fullyReconciled -> { if ( fullyReconciled ) { // Operation has been completely validated ; pop it off the list .
StorageOperation removedOp = this . operations . removeFirst ( ) ; assert op == removedOp : "Reconciled operation is not the same as removed operation" ; } return flushResult ; } ) ; |
public class MailService { /** * Tries to find messages for the specified mail account applying the specified
* { @ code condition } until it times out using the default timeout (
* { @ link EmailConstants # MAIL _ TIMEOUT _ SECONDS } and { @ link EmailConstants # MAIL _ SLEEP _ MILLIS } ) .
* < b > Note : < / b > < br / >
* This method uses the specified mail account independently without reservation . If , however ,
* the specified mail account has been reserved by any thread ( including the current one ) , an
* { @ link IllegalStateException } is thrown . < / p >
* @ param mailAccount
* the mail account
* @ param condition
* the condition a message must meet
* @ return an immutable list of mail messagess */
public List < MailMessage > findMessages ( final MailAccount mailAccount , final Predicate < MailMessage > condition ) { } } | return findMessages ( mailAccount , condition , defaultTimeoutSeconds ) ; |
public class StripeApiHandler { /** * Create a { @ link Source } using the input { @ link SourceParams } .
* @ param sourceParams a { @ link SourceParams } object with { @ link Source } creation params
* @ param publishableKey an API key
* @ param stripeAccount a connected Stripe Account ID
* @ return a { @ link Source } if one could be created from the input params ,
* or { @ code null } if not
* @ throws AuthenticationException if there is a problem authenticating to the Stripe API
* @ throws InvalidRequestException if one or more of the parameters is incorrect
* @ throws APIConnectionException if there is a problem connecting to the Stripe API
* @ throws APIException for unknown Stripe API errors . These should be rare . */
@ Nullable Source createSource ( @ NonNull SourceParams sourceParams , @ NonNull String publishableKey , @ Nullable String stripeAccount ) throws AuthenticationException , InvalidRequestException , APIConnectionException , APIException { } } | final Map < String , Object > paramMap = sourceParams . toParamMap ( ) ; mNetworkUtils . addUidParams ( paramMap ) ; final RequestOptions options = RequestOptions . builder ( publishableKey , stripeAccount , RequestOptions . TYPE_QUERY ) . build ( ) ; try { final String apiKey = options . getPublishableApiKey ( ) ; if ( StripeTextUtils . isBlank ( apiKey ) ) { return null ; } logTelemetryData ( ) ; final Map < String , Object > loggingParams = mLoggingUtils . getSourceCreationParams ( null , apiKey , sourceParams . getType ( ) ) ; final RequestOptions loggingOptions = RequestOptions . builder ( publishableKey ) . build ( ) ; logApiCall ( loggingParams , loggingOptions ) ; final StripeResponse response = requestData ( RequestExecutor . RestMethod . POST , getSourcesUrl ( ) , paramMap , options ) ; return Source . fromString ( response . getResponseBody ( ) ) ; } catch ( CardException unexpected ) { // This particular kind of exception should not be possible from a Source API endpoint .
throw new APIException ( unexpected . getMessage ( ) , unexpected . getRequestId ( ) , unexpected . getStatusCode ( ) , null , unexpected ) ; } |
public class StringUtil {

    /**
     * Returns a string with trailing whitespace (any character {@code <= ' '}, i.e. spaces and
     * control characters) stripped from the end of {@code str}.
     *
     * @param str the string to clean; may be null
     * @param defaultValue the value to return when {@code str} is null
     * @return the cleaned string, or {@code defaultValue} if {@code str} is null
     */
    public static String rtrim(String str, String defaultValue) {
        if (str == null) {
            return defaultValue;
        }
        int len = str.length();
        // Walk back over trailing spaces and control characters (anything <= ' ').
        while (0 < len && str.charAt(len - 1) <= ' ') {
            len--;
        }
        // Avoid allocating a new string when nothing was trimmed.
        return (len < str.length()) ? str.substring(0, len) : str;
    }
}
public class StageInfo { /** * Creates a StageInfo object
* Validates that the necessary Stage info arguments are specified
* @ param locationType the type of stage , i . e . AZURE / S3
* @ param location The container / bucket
* @ param credentials Map of cloud provider credentials
* @ param region The geographic region where the stage is located ( S3 only )
* @ param endPoint The Azure Storage end point ( Azure only )
* @ param storageAccount The Azure Storage account ( azure only )
* @ throws IllegalArgumentException one or more parameters required were missing */
public static StageInfo createStageInfo ( String locationType , String location , Map credentials , String region , String endPoint , String storageAccount ) throws IllegalArgumentException { } } | StageType stageType ; // Ensure that all the required parameters are specified
switch ( locationType ) { case "AZURE" : stageType = StageType . AZURE ; if ( ! isSpecified ( location ) || ! isSpecified ( endPoint ) || ! isSpecified ( storageAccount ) || credentials == null ) { throw new IllegalArgumentException ( "Incomplete parameters specified for Azure stage" ) ; } break ; case "S3" : stageType = StageType . S3 ; if ( ! isSpecified ( location ) || ! isSpecified ( region ) || credentials == null ) { throw new IllegalArgumentException ( "Incomplete parameters specified for S3 stage" ) ; } break ; case "LOCAL_FS" : stageType = StageType . LOCAL_FS ; if ( ! isSpecified ( location ) ) { throw new IllegalArgumentException ( "Incomplete parameters specific for local stage" ) ; } break ; default : throw new IllegalArgumentException ( "Invalid stage type: " + locationType ) ; } return new StageInfo ( stageType , location , credentials , region , endPoint , storageAccount ) ; |
public class InjectionProcessorManager { /** * Process annotations on fields and methods .
* @ param members the Field [ ] or Method [ ] array
* @ param annotatedMembers the same Field [ ] or Method [ ] array
* @ param processorIndex the index in { @ link # ivProcessors }
* @ param provider the corresponding provider */
private < A extends Annotation > void processMemberAnnotations ( int processorIndex , InjectionProcessorProvider < A , ? > provider , Member [ ] members , AnnotatedElement [ ] annotatedMembers ) throws InjectionException { } } | Class < A > annClass = provider . getAnnotationClass ( ) ; if ( annClass != null ) { for ( int i = 0 ; i < members . length ; i ++ ) { A ann = annotatedMembers [ i ] . getAnnotation ( annClass ) ; if ( ann != null ) { Member member = members [ i ] ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "found member annotation " + toStringSecure ( ann ) + " on " + member ) ; InjectionProcessor < A , ? > processor = getProcessor ( processorIndex , provider ) ; addOrMergeInjectionBinding ( processorIndex , processor , member . getDeclaringClass ( ) , member , ann ) ; } } } |
public class CommerceRegionPersistenceImpl { /** * Returns the commerce region where commerceCountryId = & # 63 ; and code = & # 63 ; or throws a { @ link NoSuchRegionException } if it could not be found .
* @ param commerceCountryId the commerce country ID
* @ param code the code
* @ return the matching commerce region
* @ throws NoSuchRegionException if a matching commerce region could not be found */
@ Override public CommerceRegion findByC_C ( long commerceCountryId , String code ) throws NoSuchRegionException { } } | CommerceRegion commerceRegion = fetchByC_C ( commerceCountryId , code ) ; if ( commerceRegion == null ) { StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "commerceCountryId=" ) ; msg . append ( commerceCountryId ) ; msg . append ( ", code=" ) ; msg . append ( code ) ; msg . append ( "}" ) ; if ( _log . isDebugEnabled ( ) ) { _log . debug ( msg . toString ( ) ) ; } throw new NoSuchRegionException ( msg . toString ( ) ) ; } return commerceRegion ; |
public class BTree { /** * Returns a new BTree with the provided set inserting / replacing as necessary any equal items
* @ param btree the tree to update
* @ param comparator the comparator that defines the ordering over the items in the tree
* @ param updateWith the items to either insert / update
* @ param updateWithIsSorted if false , updateWith will be copied and sorted to facilitate construction
* @ param < V >
* @ return */
public static < V > Object [ ] update ( Object [ ] btree , Comparator < V > comparator , Collection < V > updateWith , boolean updateWithIsSorted ) { } } | return update ( btree , comparator , updateWith , updateWithIsSorted , NoOp . < V > instance ( ) ) ; |
public class HtmlNotebookOutput { /** * Gets resource dir .
* @ return the resource dir */
@ javax . annotation . Nonnull public File getResourceDir ( ) { } } | @ javax . annotation . Nonnull final File etc = new File ( workingDir , "etc" ) ; etc . mkdirs ( ) ; return etc ; |
public class MultiFuture { /** * Method returns true if at least one subfutures was canceled . In case there are still processing futures
* remaining , than false is returned in any case . False is returned as well when all futures are done but none was canceled .
* @ return see method description . */
@ Override public boolean isCancelled ( ) { } } | boolean canceled = false ; for ( Future < FUTURE_TYPE > future : futureList ) { if ( ! future . isDone ( ) ) { return false ; } canceled |= future . isCancelled ( ) ; } return canceled ; |
public class JDayChooser { /** * Initializes both day names and weeks of the year . */
protected void initDecorations ( ) { } } | for ( int x = 0 ; x < 7 ; x ++ ) { days [ x ] . setContentAreaFilled ( decorationBackgroundVisible ) ; days [ x ] . setBorderPainted ( decorationBordersVisible ) ; days [ x ] . invalidate ( ) ; days [ x ] . repaint ( ) ; weeks [ x ] . setContentAreaFilled ( decorationBackgroundVisible ) ; weeks [ x ] . setBorderPainted ( decorationBordersVisible ) ; weeks [ x ] . invalidate ( ) ; weeks [ x ] . repaint ( ) ; } |
public class ServerRedirectService { /** * Return all server groups for a profile
* @ param profileId ID of profile
* @ return collection of ServerGroups for a profile */
public List < ServerGroup > tableServerGroups ( int profileId ) { } } | ArrayList < ServerGroup > serverGroups = new ArrayList < > ( ) ; PreparedStatement queryStatement = null ; ResultSet results = null ; try ( Connection sqlConnection = sqlService . getConnection ( ) ) { queryStatement = sqlConnection . prepareStatement ( "SELECT * FROM " + Constants . DB_TABLE_SERVER_GROUPS + " WHERE " + Constants . GENERIC_PROFILE_ID + " = ? " + "ORDER BY " + Constants . GENERIC_NAME ) ; queryStatement . setInt ( 1 , profileId ) ; results = queryStatement . executeQuery ( ) ; while ( results . next ( ) ) { ServerGroup curServerGroup = new ServerGroup ( results . getInt ( Constants . GENERIC_ID ) , results . getString ( Constants . GENERIC_NAME ) , results . getInt ( Constants . GENERIC_PROFILE_ID ) ) ; curServerGroup . setServers ( tableServers ( profileId , curServerGroup . getId ( ) ) ) ; serverGroups . add ( curServerGroup ) ; } } catch ( SQLException e ) { e . printStackTrace ( ) ; } finally { try { if ( results != null ) { results . close ( ) ; } } catch ( Exception e ) { } try { if ( queryStatement != null ) { queryStatement . close ( ) ; } } catch ( Exception e ) { } } return serverGroups ; |
public class DateTag { /** * setter for value , updates the size of this tag
* @ param value
* - value to be set
* @ return - this for chaining */
public DateTag setValue ( final Date value ) { } } | this . value = value ; super . setValue ( ( value . getTime ( ) - DELAY ) * NANO_MULTIPLIER ) ; return this ; |
public class ExpressionParser { /** * * * * * * utility methods * * * * * */
private static Parser < Expression > compare ( Parser < Expression > operand , String name , Op op ) { } } | return Parsers . sequence ( operand , term ( name ) . retn ( op ) , operand , BinaryExpression :: new ) ; |
public class RemoteConsumerReceiver { /** * / * ( non - Javadoc )
* @ see com . ibm . ws . sib . processor . runtime . SIMPControllable # getId ( ) */
public String getId ( ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getId" ) ; String id = aiStream . getStreamId ( ) . toString ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getId" ) ; return id ; |
public class ConcurrentTask { /** * / * ( non - Javadoc )
* @ see org . danann . cernunnos . Task # perform ( org . danann . cernunnos . TaskRequest , org . danann . cernunnos . TaskResponse ) */
public void perform ( final TaskRequest req , final TaskResponse res ) { } } | final ExecutorService executorService = ( ExecutorService ) executorServicePhrase . evaluate ( req , res ) ; final boolean failFast = Boolean . parseBoolean ( ( String ) this . failFastPhrase . evaluate ( req , res ) ) ; // Copy all attributes into the response to ensure changes to higher level request objects don ' t
// break execution of child tasks .
final Map < String , Object > attributes = req . getAttributes ( ) ; for ( final Map . Entry < String , Object > attributeEntry : attributes . entrySet ( ) ) { res . setAttribute ( attributeEntry . getKey ( ) , attributeEntry . getValue ( ) ) ; } // Submit the sub - tasks to the thread pool
executorService . submit ( new Runnable ( ) { public void run ( ) { try { ConcurrentTask . this . performSubtasks ( req , res ) ; } catch ( Throwable t ) { if ( failFast ) { // We want this as close to the top of the catch as possible so the pool is shutdown as soon as possible
executorService . shutdown ( ) ; if ( executorService instanceof ThreadPoolExecutor ) { ( ( ThreadPoolExecutor ) executorService ) . getQueue ( ) . clear ( ) ; } log . debug ( "Shut down ExecutorService due to exception from ConcurrentTask subtask" ) ; } final String msg ; if ( failFast ) { msg = " The parent ExecutorService has been shutdown since fail-fast was set to true" ; } else { msg = " Conccurent execution will continue." ; } log . error ( "Exception thrown while performing subtask in its own thread." + msg , t ) ; } } } ) ; |
public class PropertyCache { /** * Get an array of { @ link java . beans . PropertyDescriptor } objects that
* describe JavaBean properties of the given < code > _ type < / code > . This
* array < b > should not < / b > be modified .
* @ param type the { @ link java . lang . Class } whose JavaBean properties to find
* @ return an array of { @ link java . beans . PropertyDescriptor } objects that describe the JavaBean properties */
public final PropertyDescriptor [ ] getPropertyDescriptors ( Class type ) { } } | CachedClass cc = getCachedClass ( type ) ; return ( cc != null ? cc . getPropertyDescriptors ( ) : null ) ; |
public class CmsShellCommands { /** * Exports a list of resources from the current site root to a ZIP file . < p >
* The resource names in the list must be separated with a " ; " . < p >
* @ param exportFile the name ( absolute path ) of the ZIP file to export to
* @ param pathList the list of resource to export , separated with a " ; "
* @ param isReducedExportMode flag , indicating if the reduced export mode should be used
* @ throws Exception if something goes wrong */
public void exportResources ( String exportFile , String pathList , boolean isReducedExportMode ) throws Exception { } } | StringTokenizer tok = new StringTokenizer ( pathList , ";" ) ; List < String > exportPaths = new ArrayList < String > ( ) ; while ( tok . hasMoreTokens ( ) ) { exportPaths . add ( tok . nextToken ( ) ) ; } boolean includeSystem = false ; if ( pathList . startsWith ( CmsWorkplace . VFS_PATH_SYSTEM ) || ( pathList . indexOf ( ";" + CmsWorkplace . VFS_PATH_SYSTEM ) > - 1 ) ) { includeSystem = true ; } CmsVfsImportExportHandler vfsExportHandler = new CmsVfsImportExportHandler ( ) ; CmsExportParameters params = new CmsExportParameters ( exportFile , null , true , false , false , exportPaths , includeSystem , true , 0 , true , false , isReducedExportMode ? ExportMode . REDUCED : ExportMode . DEFAULT ) ; vfsExportHandler . setExportParams ( params ) ; OpenCms . getImportExportManager ( ) . exportData ( m_cms , vfsExportHandler , new CmsShellReport ( m_cms . getRequestContext ( ) . getLocale ( ) ) ) ; |
public class Measure { /** * getter for accuracy - gets allows to store additional information about this
* measure , e . g . modifiers like ' ~ ' , ' + / - ' , ' approximately '
* @ generated
* @ return value of the feature */
public String getAccuracy ( ) { } } | if ( Measure_Type . featOkTst && ( ( Measure_Type ) jcasType ) . casFeat_accuracy == null ) jcasType . jcas . throwFeatMissing ( "accuracy" , "ch.epfl.bbp.uima.types.Measure" ) ; return jcasType . ll_cas . ll_getStringValue ( addr , ( ( Measure_Type ) jcasType ) . casFeatCode_accuracy ) ; |
public class DataStructHelper { /** * members in the form key , val , key , val etc . */
public Obj buildObject ( Object ... members ) { } } | Obj o = newObject ( ) ; for ( int i = 0 ; i < members . length ; i += 2 ) { o . put ( ( String ) members [ i ] , members [ i + 1 ] ) ; } return o ; |
public class FrequentlyUsedPolicy { /** * Evicts while the map exceeds the maximum capacity . */
private void evict ( Node candidate ) { } } | if ( data . size ( ) > maximumSize ) { Node victim = nextVictim ( candidate ) ; boolean admit = admittor . admit ( candidate . key , victim . key ) ; if ( admit ) { evictEntry ( victim ) ; } else { evictEntry ( candidate ) ; } policyStats . recordEviction ( ) ; } |
public class FileSystem { /** * copy a file from local to a file in this file system
* @ param delSrc if source should be deleted
* @ param overwrite if destination should be overwritten
* @ param validate if copied destination should be validated against source
* @ param src source file
* @ param dst destination path
* @ throws IOException */
public void copyFromLocalFile ( boolean delSrc , boolean overwrite , boolean validate , Path src , Path dst ) throws IOException { } } | Configuration conf = getConf ( ) ; FileUtil . copy ( getLocal ( conf ) , src , this , dst , delSrc , overwrite , validate , conf ) ; |
public class ElementBase { /** * Returns the first ancestor corresponding to the specified class .
* @ param < T > The type of ancestor sought .
* @ param clazz Class of ancestor sought .
* @ return An ancestor of the specified class or null if not found . */
@ SuppressWarnings ( "unchecked" ) public < T extends ElementBase > T getAncestor ( Class < T > clazz ) { } } | ElementBase parent = getParent ( ) ; while ( parent != null && ! clazz . isInstance ( parent ) ) { parent = parent . getParent ( ) ; } return ( T ) parent ; |
public class ENU { /** * Converts the wgs84 coordinate to ENU .
* @ param cLL the wgs84 coordinate .
* @ return the ENU coordinate .
* @ throws MatrixException */
public Coordinate wgs84ToEnu ( Coordinate cLL ) { } } | checkZ ( cLL ) ; Coordinate cEcef = wgs84ToEcef ( cLL ) ; Coordinate enu = ecefToEnu ( cEcef ) ; return enu ; |
public class Principals { /** * Extracts the simple principals of the collection of principals
* @ param principalProviders the principals to extract from
* @ return the simple principals */
public static Collection < SimplePrincipalProvider > getSimplePrincipals ( Collection < PrincipalProvider < ? > > principalProviders ) { } } | Collection < SimplePrincipalProvider > principals = new ArrayList < > ( ) ; for ( PrincipalProvider < ? > principal : principalProviders ) { if ( principal instanceof SimplePrincipalProvider ) { principals . add ( ( SimplePrincipalProvider ) principal ) ; } } return principals ; |
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link ArithType } { @ code > } } */
@ XmlElementDecl ( namespace = "http://www.w3.org/1998/Math/MathML" , name = "max" ) public JAXBElement < ArithType > createMax ( ArithType value ) { } } | return new JAXBElement < ArithType > ( _Max_QNAME , ArithType . class , null , value ) ; |
public class ASMifier { @ Override public void visitParameter ( String parameterName , int access ) { } } | buf . setLength ( 0 ) ; buf . append ( name ) . append ( ".visitParameter(" ) ; appendString ( buf , parameterName ) ; buf . append ( ", " ) ; appendAccess ( access ) ; text . add ( buf . append ( ");\n" ) . toString ( ) ) ; |
public class TriggerReader { /** * Make the request to the Twilio API to perform the read .
* @ param client TwilioRestClient with which to make the request
* @ return Trigger ResourceSet */
@ Override public ResourceSet < Trigger > read ( final TwilioRestClient client ) { } } | return new ResourceSet < > ( this , client , firstPage ( client ) ) ; |
public class TouchActions { /** * Allows the execution of single tap on the screen , analogous to click using a Mouse .
* @ param onElement the { @ link WebElement } on the screen .
* @ return self */
public TouchActions singleTap ( WebElement onElement ) { } } | if ( touchScreen != null ) { action . addAction ( new SingleTapAction ( touchScreen , ( Locatable ) onElement ) ) ; } tick ( touchPointer . createPointerDown ( 0 ) ) ; tick ( touchPointer . createPointerUp ( 0 ) ) ; return this ; |
public class JwtSSOTokenImpl { /** * ( non - Javadoc )
* @ see com . ibm . ws . security . jwt . sso . token . utils . JwtSSOToken #
* shouldSetJwtCookiePathToWebAppContext ( ) */
@ Override public boolean shouldSetJwtCookiePathToWebAppContext ( ) { } } | // TODO Auto - generated method stub
JwtSsoBuilderConfig jwtssobuilderConfig = getJwtSSOBuilderConfig ( ) ; if ( jwtssobuilderConfig != null ) { return jwtssobuilderConfig . isSetCookiePathToWebAppContextPath ( ) ; } return false ; |
public class BBRandomGenerator { /** * Generates random bytes filling the given buffer entirely
* @ param buffer The byte array to fill with random bytes */
public void getBytes ( byte [ ] buffer ) { } } | try { fipsGenerator . getBytes ( buffer ) ; } catch ( Throwable ex ) { BBPlatform . getPlatform ( ) . getLogger ( ) . logException ( "RandomGenerator.getBytes() Falling back to RandomSource... " + ex . getMessage ( ) ) ; RandomSource . getBytes ( buffer ) ; } |
public class Utils { /** * Convert a value to the requested descriptor . For null values where the caller needs a primitive , this returns the
* appropriate ( boxed ) default . This method will not attempt conversion , it is basically checking what to do if the
* result is null - and ensuring the caller gets back what they expect ( the appropriate primitive default ) .
* @ param value the value
* @ param desc the type the caller would like it to be
* @ return the converted value or possibly a default value for the type if the incoming value is null */
public static Object toResultCheckIfNull ( Object value , String desc ) { } } | if ( value == null ) { if ( desc . length ( ) == 1 ) { switch ( desc . charAt ( 0 ) ) { case 'I' : return DEFAULT_INT ; case 'B' : return DEFAULT_BYTE ; case 'C' : return DEFAULT_CHAR ; case 'S' : return DEFAULT_SHORT ; case 'J' : return DEFAULT_LONG ; case 'F' : return DEFAULT_FLOAT ; case 'D' : return DEFAULT_DOUBLE ; case 'Z' : return Boolean . FALSE ; default : throw new IllegalStateException ( "Invalid primitive descriptor " + desc ) ; } } else { return null ; } } else { return value ; } |
public class CleaneLingSolver { /** * Dumps a given clause .
* @ param c the clause */
private void dumpClause ( final CLClause c ) { } } | if ( c . dumped ( ) ) { return ; } if ( c . redundant ( ) ) { assert this . stats . clausesRedundant > 0 ; this . stats . clausesRedundant -- ; } else { assert this . stats . clausesIrredundant > 0 ; this . stats . clausesIrredundant -- ; if ( this . dense ) { for ( int i = 0 ; i < c . lits ( ) . size ( ) ; i ++ ) { decOcc ( c . lits ( ) . get ( i ) ) ; } } } c . setDumped ( true ) ; |
public class AbstractIndexPage { /** * Returns ( and loads , if necessary ) the page at the specified position . */
protected AbstractIndexPage getPageByPos ( int pos ) { } } | AbstractIndexPage page = subPages [ pos ] ; if ( page != null ) { return page ; } page = ind . readPage ( subPageIds [ pos ] , this ) ; subPages [ pos ] = page ; return page ; |
public class MapView { /** * Gets the current bounds of the screen in < I > screen coordinates < / I > . */
public Rect getScreenRect ( final Rect reuse ) { } } | final Rect out = getIntrinsicScreenRect ( reuse ) ; if ( this . getMapOrientation ( ) != 0 && this . getMapOrientation ( ) != 180 ) { GeometryMath . getBoundingBoxForRotatatedRectangle ( out , out . centerX ( ) , out . centerY ( ) , this . getMapOrientation ( ) , out ) ; } return out ; |
public class PhaseThreeImpl { /** * { @ inheritDoc } */
@ Override public DocumentModificationResult pruneGene ( final Document genes , final ProtoNetwork network ) { } } | DocumentModificationResult pr = new DocumentModificationResult ( ) ; // Load the equivalences
Set < EquivalenceDataIndex > equivs ; try { equivs = p2 . stage2LoadNamespaceEquivalences ( ) ; } catch ( EquivalenceMapResolutionFailure f ) { // Unrecoverable error
pr . addError ( f . getUserFacingMessage ( ) ) ; pr . setSuccess ( false ) ; return pr ; } // Map namespace to lookup
Map < String , JDBMEquivalenceLookup > lookups = sizedHashMap ( equivs . size ( ) ) ; for ( final EquivalenceDataIndex edi : equivs ) { String rl = edi . getNamespaceResourceLocation ( ) ; DataFileIndex dfi = edi . getEquivalenceIndex ( ) ; lookups . put ( rl , new JDBMEquivalenceLookup ( dfi . getIndexPath ( ) ) ) ; } // Open the indices
for ( final JDBMEquivalenceLookup jl : lookups . values ( ) ) { try { jl . open ( ) ; } catch ( IOException e ) { pr . addError ( e . getMessage ( ) ) ; pr . setSuccess ( false ) ; return pr ; } } // It ' s helpful to reference the contents of the gene scaffolding
// BEL document in understanding this section .
// Establish set of UUIDs relevant for the document ' s member parameters
Set < SkinnyUUID > uuids = new HashSet < SkinnyUUID > ( ) ; pr . setTotalStatements ( genes . getNumberOfStatements ( ) ) ; for ( final Statement stmt : genes ) { // Add all the parameters
final List < Parameter > parameters = stmt . getAllParameters ( ) ; for ( final Parameter p : parameters ) { if ( ! validParameter ( p ) ) { continue ; } Namespace ns = p . getNamespace ( ) ; String val = p . getValue ( ) ; String rl = ns . getResourceLocation ( ) ; JDBMEquivalenceLookup jdbmLookup = lookups . get ( rl ) ; if ( jdbmLookup == null ) { continue ; } SkinnyUUID uuid = jdbmLookup . lookup ( val ) ; if ( uuid != null ) { uuids . add ( uuid ) ; } } } // Establish set of UUIDs relevant for the proto - network
Set < SkinnyUUID > pnUUIDs = new HashSet < SkinnyUUID > ( ) ; ParameterTable paramTbl = network . getParameterTable ( ) ; TableParameter [ ] paramArr = paramTbl . getTableParameterArray ( ) ; for ( final TableParameter tp : paramArr ) { if ( ! validParameter ( tp ) ) { continue ; } TableNamespace namespace = tp . getNamespace ( ) ; String value = tp . getValue ( ) ; String rl = namespace . getResourceLocation ( ) ; JDBMEquivalenceLookup jdbmLookup = lookups . get ( rl ) ; if ( jdbmLookup == null ) { continue ; } SkinnyUUID uuid = jdbmLookup . lookup ( value ) ; if ( uuid != null ) { pnUUIDs . add ( uuid ) ; } } // Two sets of UUIDs have been established at this point .
// The first set , uuids , is the set based on the genes .
// The second set , pnUUIDs , is the set based on the proto - network .
// Make uuids the intersection of the two : ( uuids ∩ pnUUIDs )
uuids . retainAll ( pnUUIDs ) ; final Set < Statement > toPrune = new HashSet < Statement > ( ) ; int pruned = 0 ; // Pruning is simply a matter of iterating all statements in genes . . .
GENES : for ( final Statement stmt : genes ) { List < Parameter > parameters = stmt . getAllParameters ( ) ; // . . . and all parameters in each statement . . .
for ( final Parameter p : parameters ) { if ( ! validParameter ( p ) ) { continue ; } Namespace ns = p . getNamespace ( ) ; String val = p . getValue ( ) ; String rl = ns . getResourceLocation ( ) ; // . . . do we have set membership ?
JDBMEquivalenceLookup jdbmLookup = lookups . get ( rl ) ; if ( jdbmLookup == null ) { continue ; } SkinnyUUID uuid = jdbmLookup . lookup ( val ) ; // Check for set membership , uuid ∊ uuids
if ( uuids . contains ( uuid ) ) { // This indicates equivalent parameters are being
// used by both document and network . This
// statement will remain ( i . e . , not be pruned ) .
continue GENES ; } } toPrune . add ( stmt ) ; } for ( final StatementGroup sg : genes . getStatementGroups ( ) ) { List < Statement > curstmts = sg . getStatements ( ) ; List < Statement > newstmts = sizedArrayList ( curstmts . size ( ) ) ; for ( final Statement stmt : curstmts ) { if ( ! toPrune . contains ( stmt ) ) { newstmts . add ( stmt ) ; } else { pruned ++ ; } } sg . setStatements ( newstmts ) ; } pr . setDeltaStatements ( - ( pruned ) ) ; pr . setSuccess ( true ) ; for ( final JDBMEquivalenceLookup jl : lookups . values ( ) ) { try { jl . close ( ) ; } catch ( IOException e ) { pr . addWarning ( e . getMessage ( ) ) ; } } return pr ; |
public class PackratParserGenUtil { /** * TODO think about super grammar */
public static Keyword findFirstKeywordWithSameConflicts ( final Keyword element , final Grammar grammar ) { } } | final List < AbstractRule > conflicting = getConflictingLexerRules ( element , grammar ) ; Keyword result = element ; Iterator < Keyword > iterator = Iterators . filter ( Iterators . filter ( EcoreUtil . getAllContents ( grammar , true ) , Keyword . class ) , new Predicate < Keyword > ( ) { @ Override public boolean apply ( Keyword param ) { if ( GrammarUtil . containingParserRule ( param ) == null ) return false ; final List < AbstractRule > otherConflicting = getConflictingLexerRules ( param , grammar ) ; return otherConflicting != null && otherConflicting . equals ( conflicting ) ; } } ) ; if ( iterator . hasNext ( ) ) result = iterator . next ( ) ; return result ; |
public class DeepwaterCaffeBackend { /** * given a mini - batch worth of data and labels , train */
@ Override public float [ ] /* ignored */
train ( BackendModel m , float [ /* mini _ batch * input _ neurons */
] data , float [ /* mini _ batch */
] label ) { } } | ( ( DeepwaterCaffeModel ) m ) . train ( data , label ) ; return null ; // return value is always ignored |
public class LoggerCreator { /** * Convert a numerical representation of logging level to the logging level .
* @ param num
* - the numerical index that corresponds to the given level .
* @ return the logging level . */
@ SuppressWarnings ( { } } | "checkstyle:magicnumber" , "checkstyle:returncount" } ) public static Level fromInt ( int num ) { switch ( num ) { case 0 : return Level . OFF ; case 1 : return Level . SEVERE ; case 2 : return Level . WARNING ; case 3 : return Level . INFO ; case 4 : return Level . FINE ; case 5 : return Level . FINER ; case 6 : return Level . FINEST ; case 7 : return Level . ALL ; default : if ( num < 0 ) { return Level . OFF ; } return Level . ALL ; } |
public class ListGroupMembershipsResult { /** * The list of the members of the group .
* @ param groupMemberList
* The list of the members of the group . */
public void setGroupMemberList ( java . util . Collection < GroupMember > groupMemberList ) { } } | if ( groupMemberList == null ) { this . groupMemberList = null ; return ; } this . groupMemberList = new java . util . ArrayList < GroupMember > ( groupMemberList ) ; |
public class FilterAdapter { /** * Create a new FilterAdapter
* @ return a { @ link com . google . cloud . bigtable . hbase . adapters . filters . FilterAdapter } object . */
public static FilterAdapter buildAdapter ( ) { } } | FilterAdapter adapter = new FilterAdapter ( ) ; adapter . addFilterAdapter ( ColumnPrefixFilter . class , new ColumnPrefixFilterAdapter ( ) ) ; adapter . addFilterAdapter ( ColumnRangeFilter . class , new ColumnRangeFilterAdapter ( ) ) ; adapter . addFilterAdapter ( KeyOnlyFilter . class , new KeyOnlyFilterAdapter ( ) ) ; adapter . addFilterAdapter ( MultipleColumnPrefixFilter . class , new MultipleColumnPrefixFilterAdapter ( ) ) ; adapter . addFilterAdapter ( TimestampsFilter . class , new TimestampsFilterAdapter ( ) ) ; adapter . addFilterAdapter ( TimestampRangeFilter . class , new TimestampRangeFilterAdapter ( ) ) ; ValueFilterAdapter valueFilterAdapter = new ValueFilterAdapter ( ) ; adapter . addFilterAdapter ( ValueFilter . class , valueFilterAdapter ) ; SingleColumnValueFilterAdapter scvfa = new SingleColumnValueFilterAdapter ( valueFilterAdapter ) ; adapter . addFilterAdapter ( SingleColumnValueFilter . class , scvfa ) ; adapter . addFilterAdapter ( SingleColumnValueExcludeFilter . class , new SingleColumnValueExcludeFilterAdapter ( scvfa ) ) ; adapter . addFilterAdapter ( ColumnPaginationFilter . class , new ColumnPaginationFilterAdapter ( ) ) ; adapter . addFilterAdapter ( FirstKeyOnlyFilter . class , new FirstKeyOnlyFilterAdapter ( ) ) ; adapter . addFilterAdapter ( ColumnCountGetFilter . class , new ColumnCountGetFilterAdapter ( ) ) ; adapter . addFilterAdapter ( RandomRowFilter . class , new RandomRowFilterAdapter ( ) ) ; adapter . addFilterAdapter ( PrefixFilter . class , new PrefixFilterAdapter ( ) ) ; adapter . addFilterAdapter ( QualifierFilter . class , new QualifierFilterAdapter ( ) ) ; adapter . addFilterAdapter ( PageFilter . class , new PageFilterAdapter ( ) ) ; adapter . addFilterAdapter ( WhileMatchFilter . class , new WhileMatchFilterAdapter ( adapter ) ) ; adapter . addFilterAdapter ( org . apache . hadoop . hbase . filter . RowFilter . class , new RowFilterAdapter ( ) ) ; adapter . 
addFilterAdapter ( FuzzyRowFilter . class , new FuzzyRowFilterAdapter ( ) ) ; adapter . addFilterAdapter ( FamilyFilter . class , new FamilyFilterAdapter ( ) ) ; adapter . addFilterAdapter ( BigtableFilter . class , new BigtableFilterAdapter ( ) ) ; // MultiRowRangeFilter only exists in hbase > = 1.1
try { adapter . addFilterAdapter ( org . apache . hadoop . hbase . filter . MultiRowRangeFilter . class , new MultiRowRangeFilterAdapter ( ) ) ; } catch ( NoClassDefFoundError ignored ) { } // Passing the FilterAdapter in to the FilterListAdapter is a bit
// unfortunate , but makes adapting the FilterList ' s subfilters simpler .
FilterListAdapter filterListAdapter = new FilterListAdapter ( adapter ) ; // FilterList implements UnsupportedStatusCollector so it should
// be used when possible ( third parameter to addFilterAdapter ( ) ) .
adapter . addFilterAdapter ( FilterList . class , filterListAdapter , filterListAdapter ) ; return adapter ; |
public class Environment { /** * Performs PUT to supplied url of result of applying template with model .
* @ param url url to put to .
* @ param templateName name of template to use .
* @ param model model for template .
* @ param result result to populate with response . */
public void doHttpPut ( String url , String templateName , Object model , HttpResponse result ) { } } | doHttpPut ( url , templateName , model , result , null , XmlHttpResponse . CONTENT_TYPE_XML_TEXT_UTF8 ) ; |
public class SeleniumSpec { /** * Delete or replace the text on a numbered { @ code index } previously found element .
* @ param index */
@ When ( "^I delete the text '(.+?)' on the element on index '(\\d+?)'( and replace it for '(.+?)')?$" ) public void seleniumDelete ( String text , Integer index , String foo , String replacement ) { } } | assertThat ( this . commonspec , commonspec . getPreviousWebElements ( ) ) . as ( "There are less found elements than required" ) . hasAtLeast ( index ) ; Actions actions = new Actions ( commonspec . getDriver ( ) ) ; actions . moveToElement ( commonspec . getPreviousWebElements ( ) . getPreviousWebElements ( ) . get ( index ) , ( text . length ( ) / 2 ) , 0 ) ; for ( int i = 0 ; i < ( text . length ( ) / 2 ) ; i ++ ) { actions . sendKeys ( Keys . ARROW_LEFT ) ; actions . build ( ) . perform ( ) ; } for ( int i = 0 ; i < text . length ( ) ; i ++ ) { actions . sendKeys ( Keys . DELETE ) ; actions . build ( ) . perform ( ) ; } if ( replacement != null && replacement . length ( ) != 0 ) { actions . sendKeys ( replacement ) ; actions . build ( ) . perform ( ) ; } |
public class ByteBuddy { /** * Creates a new { @ link Enum } type .
* < b > Note < / b > : Byte Buddy does not cache previous subclasses but will attempt the generation of a new subclass . For caching
* types , a external cache or { @ link TypeCache } should be used .
* @ param value The names of the type ' s enumeration constants
* @ return A type builder for creating an enumeration type . */
public DynamicType . Builder < ? extends Enum < ? > > makeEnumeration ( String ... value ) { } } | return makeEnumeration ( Arrays . asList ( value ) ) ; |
public class AnnotationTypeOptionalMemberWriterImpl { /** * { @ inheritDoc } */
public void addSummaryAnchor ( ClassDoc cd , Content memberTree ) { } } | memberTree . addContent ( writer . getMarkerAnchor ( SectionName . ANNOTATION_TYPE_OPTIONAL_ELEMENT_SUMMARY ) ) ; |
public class AddOnRunIssuesUtils { /** * Returns the textual representations of the running issues ( Java version and dependency ) , if any .
* The messages are not internationalised , should be used only for logging and non UI uses .
* @ param requirements the run requirements of the add - on or extension
* @ return a { @ code List } containing all the running issues of the add - on or extension , empty if none
* @ see # getUiRunningIssues ( AddOn . BaseRunRequirements , AddOnSearcher )
* @ see # getUiExtensionsRunningIssues ( AddOn . AddOnRunRequirements , AddOnSearcher ) */
public static List < String > getRunningIssues ( AddOn . BaseRunRequirements requirements ) { } } | List < String > issues = new ArrayList < > ( 2 ) ; String issue = getJavaVersionIssue ( requirements ) ; if ( issue != null ) { issues . add ( issue ) ; } issue = getDependencyIssue ( requirements ) ; if ( issue != null ) { issues . add ( issue ) ; } return issues ; |
public class SimpleXMLDataSolrSink { /** * ( non - Javadoc )
* @ see com . sematext . ag . sink . Sink # init ( com . sematext . ag . PlayerConfig ) */
@ Override public void init ( PlayerConfig config ) throws InitializationFailedException { } } | super . init ( config ) ; solrUrl = config . get ( SOLR_URL_KEY ) ; if ( solrUrl == null || "" . equals ( solrUrl . trim ( ) ) ) { throw new IllegalArgumentException ( this . getClass ( ) . getName ( ) + " expects configuration property " + SOLR_URL_KEY ) ; } |
public class KafkaIndexTaskIOConfig { /** * This method is for compatibilty so that newer version of KafkaIndexTaskIOConfig can be read by
* old version of Druid . Note that this method returns end sequence numbers instead of start . This is because
* { @ link SeekableStreamStartSequenceNumbers } didn ' t exist before . */
@ JsonProperty @ Deprecated public SeekableStreamEndSequenceNumbers < Integer , Long > getStartPartitions ( ) { } } | // Converting to start sequence numbers . This is allowed for Kafka because the start offset is always inclusive .
final SeekableStreamStartSequenceNumbers < Integer , Long > startSequenceNumbers = getStartSequenceNumbers ( ) ; return new SeekableStreamEndSequenceNumbers < > ( startSequenceNumbers . getStream ( ) , startSequenceNumbers . getPartitionSequenceNumberMap ( ) ) ; |
public class S3RestUtils { /** * Creates a response using the given object .
* @ param object the object to respond with
* @ return the response */
private static Response createResponse ( Object object ) { } } | if ( object == null ) { return Response . ok ( ) . build ( ) ; } if ( object instanceof Response ) { return ( Response ) object ; } if ( object instanceof Response . Status ) { Response . Status s = ( Response . Status ) object ; switch ( s ) { case OK : return Response . ok ( ) . build ( ) ; case ACCEPTED : return Response . accepted ( ) . build ( ) ; case NO_CONTENT : return Response . noContent ( ) . build ( ) ; default : return createErrorResponse ( new S3Exception ( "Response status is invalid" , S3ErrorCode . INTERNAL_ERROR ) ) ; } } // Need to explicitly encode the string as XML because Jackson will not do it automatically .
XmlMapper mapper = new XmlMapper ( ) ; try { return Response . ok ( mapper . writeValueAsString ( object ) ) . build ( ) ; } catch ( JsonProcessingException e ) { return Response . status ( Response . Status . INTERNAL_SERVER_ERROR ) . entity ( "Failed to encode XML: " + e . getMessage ( ) ) . build ( ) ; } |
public class MetaBeans { /** * Registers a meta - bean .
* This should be done for all beans in a static factory where possible .
* If the meta - bean is dynamic , this method should not be called .
* @ param metaBean the meta - bean , not null
* @ throws IllegalArgumentException if unable to register */
static void register ( MetaBean metaBean ) { } } | Class < ? extends Bean > type = metaBean . beanType ( ) ; if ( META_BEANS . putIfAbsent ( type , metaBean ) != null ) { throw new IllegalArgumentException ( "Cannot register class twice: " + type . getName ( ) ) ; } |
public class MinMax { /** * Return a { @ code Collector } which calculates the minimum and maximum value .
* The given { @ code comparator } is used for comparing two objects .
* < pre > { @ code
* final Comparator < SomeObject > comparator = . . .
* final Stream < SomeObject > stream = . . .
* final MinMax < SomeObject > moments = stream
* . collect ( doubleMoments . toMinMax ( comparator ) ) ;
* } < / pre >
* @ param comparator the { @ code Comparator } to use
* @ param < T > the type of the input elements
* @ return a { @ code Collector } implementing the min - max reduction
* @ throws java . lang . NullPointerException if the given { @ code mapper } is
* { @ code null } */
public static < T > Collector < T , ? , MinMax < T > > toMinMax ( final Comparator < ? super T > comparator ) { } } | requireNonNull ( comparator ) ; return Collector . of ( ( ) -> MinMax . of ( comparator ) , MinMax :: accept , MinMax :: combine ) ; |
public class FunctionParamBuilder { /** * Add variable arguments to the end of the parameter list .
* @ return False if this is called after var args are added . */
public boolean addVarArgs ( JSType type ) { } } | if ( hasVarArgs ( ) ) { return false ; } newParameter ( type ) . setVarArgs ( true ) ; return true ; |
public class AuthUtils { /** * Generates a BASE64 Basic Authentication String
* @ return BASE64 Basic Authentication String */
public static String getBasicAuth ( String user , final String pass ) { } } | return "Basic " + java . util . Base64 . getEncoder ( ) . encodeToString ( ( user + ":" + pass ) . getBytes ( ) ) ; |
public class BPSimDataTypeImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public NotificationChain eInverseRemove ( InternalEObject otherEnd , int featureID , NotificationChain msgs ) { } } | switch ( featureID ) { case BpsimPackage . BP_SIM_DATA_TYPE__GROUP : return ( ( InternalEList < ? > ) getGroup ( ) ) . basicRemove ( otherEnd , msgs ) ; case BpsimPackage . BP_SIM_DATA_TYPE__SCENARIO : return ( ( InternalEList < ? > ) getScenario ( ) ) . basicRemove ( otherEnd , msgs ) ; } return super . eInverseRemove ( otherEnd , featureID , msgs ) ; |
public class Weeks { /** * Returns a new instance with the specified number of weeks added .
* This instance is immutable and unaffected by this method call .
* @ param weeks the amount of weeks to add , may be negative
* @ return the new period plus the specified number of weeks
* @ throws ArithmeticException if the result overflows an int */
public Weeks plus ( int weeks ) { } } | if ( weeks == 0 ) { return this ; } return Weeks . weeks ( FieldUtils . safeAdd ( getValue ( ) , weeks ) ) ; |
public class TagWizardController { /** * Delete a single tag
* @ param request the { @ link RemoveTagRequest } containing entityTypeId , attributeName , relationIRI
* and ontologyTermIRI */
@ PostMapping ( "/deletesingletag" ) public @ ResponseBody void deleteSingleTag ( @ Valid @ RequestBody RemoveTagRequest request ) { } } | ontologyTagService . removeAttributeTag ( request . getEntityTypeId ( ) , request . getAttributeName ( ) , request . getRelationIRI ( ) , request . getOntologyTermIRI ( ) ) ; |
public class DateTimeExtensions { /** * Returns an { @ link java . time . OffsetTime } of this time and the provided { @ link java . time . ZoneOffset } .
* @ param self a LocalTime
* @ param offset a ZoneOffset
* @ return an OffsetTime
* @ since 2.5.0 */
public static OffsetTime leftShift ( final LocalTime self , ZoneOffset offset ) { } } | return OffsetTime . of ( self , offset ) ; |
public class VelocityUtil { /** * 生成内容写到响应内容中 < br >
* 模板的变量来自于Request的Attribute对象
* @ param templateFileName 模板文件
* @ param request 请求对象 , 用于获取模板中的变量值
* @ param response 响应对象 */
public static void toWriter ( String templateFileName , javax . servlet . http . HttpServletRequest request , javax . servlet . http . HttpServletResponse response ) { } } | final VelocityContext context = new VelocityContext ( ) ; parseRequest ( context , request ) ; parseSession ( context , request . getSession ( false ) ) ; Writer writer = null ; try { writer = response . getWriter ( ) ; toWriter ( templateFileName , context , writer ) ; } catch ( Exception e ) { throw new UtilException ( e , "Write Velocity content template by [{}] to response error!" , templateFileName ) ; } finally { IoUtil . close ( writer ) ; } |
public class EventClient { /** * Deletes a user .
* @ param uid ID of the user
* @ param eventTime timestamp of the event
* @ return ID of this event */
public String deleteUser ( String uid , DateTime eventTime ) throws ExecutionException , InterruptedException , IOException { } } | return createEvent ( deleteUserAsFuture ( uid , eventTime ) ) ; |
public class Privilege { /** * Check if this privilege matches the given resource / action couple
* @ param resourceName the resource name
* @ param action the action name
* @ return true if the privilege matches */
public boolean matches ( String resourceName , String action ) { } } | // Match rsource name first
if ( ! StringTools . matches ( resourceName , resourcePattern ) ) return false ; // Check action
return actions . contains ( action ) ; |
public class ManagementLocksInner { /** * Gets all the management locks for a resource group .
* @ param resourceGroupName The name of the resource group containing the locks to get .
* @ param filter The filter to apply on the operation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; ManagementLockObjectInner & gt ; object */
public Observable < Page < ManagementLockObjectInner > > listByResourceGroupAsync ( final String resourceGroupName , final String filter ) { } } | return listByResourceGroupWithServiceResponseAsync ( resourceGroupName , filter ) . map ( new Func1 < ServiceResponse < Page < ManagementLockObjectInner > > , Page < ManagementLockObjectInner > > ( ) { @ Override public Page < ManagementLockObjectInner > call ( ServiceResponse < Page < ManagementLockObjectInner > > response ) { return response . body ( ) ; } } ) ; |
public class VariableNamePattern { /** * Gets a { @ code VariableNamePattern } which uses the names in { @ code templateVars }
* as the name templates for matching variables . The names are specified in a
* rudimentary pattern language : if a name contains a " ? ( x ) " , this portion is
* allowed to match any integer value . x is a number which is an integer
* offset for the match .
* @ param variables
* @ return */
public static VariableNamePattern fromTemplateVariables ( VariableNumMap templateVariables , VariableNumMap fixedVariables ) { } } | List < VariableNameMatcher > matchers = Lists . newArrayList ( ) ; for ( String variableName : templateVariables . getVariableNamesArray ( ) ) { int varIndex = variableName . indexOf ( "?(" ) ; String variableNamePrefix = variableName . substring ( 0 , varIndex ) ; String variableNameSuffix = variableName . substring ( varIndex + 1 ) ; int offsetIndex = variableNameSuffix . indexOf ( ")" ) ; int offset = Integer . parseInt ( variableNameSuffix . substring ( 1 , offsetIndex ) ) ; variableNameSuffix = variableNameSuffix . substring ( offsetIndex + 1 ) ; matchers . add ( new VariableNameMatcher ( variableNamePrefix , variableNameSuffix , offset ) ) ; } return new VariableNamePattern ( matchers , templateVariables , fixedVariables ) ; |
public class MarkLogicOutputFormat { /** * with outputHost */
protected TextArrayWritable queryHosts ( ContentSource cs , String matchHost , String replaceHost ) throws IOException { } } | Session session = null ; ResultSequence result = null ; try { session = cs . newSession ( ) ; AdhocQuery query = session . newAdhocQuery ( HOSTS_QUERY ) ; // query hosts
RequestOptions options = new RequestOptions ( ) ; options . setDefaultXQueryVersion ( "1.0-ml" ) ; query . setOptions ( options ) ; result = session . submitRequest ( query ) ; ArrayList < Text > hosts = new ArrayList < Text > ( ) ; while ( result . hasNext ( ) ) { ResultItem item = result . next ( ) ; String host = item . asString ( ) ; if ( matchHost != null && host . equals ( matchHost ) ) { hosts . add ( new Text ( replaceHost ) ) ; } else { hosts . add ( new Text ( host ) ) ; } } if ( hosts . isEmpty ( ) ) { throw new IOException ( "Target database has no forests attached: " + "check forests in database" ) ; } return new TextArrayWritable ( hosts . toArray ( new Text [ hosts . size ( ) ] ) ) ; } catch ( RequestException e ) { LOG . error ( e . getMessage ( ) , e ) ; throw new IOException ( e ) ; } finally { if ( result != null ) { result . close ( ) ; } if ( session != null ) { session . close ( ) ; } } |
public class SrvBalanceStd { /** * < p > Handle new accounting entry is created to check dirty .
* This is implementation of dirty check for all accounts . < / p >
* @ param pAddParam additional param
* @ param pAcc account
* @ param pSubaccId subaccount ID
* @ param pDateAt date at
* @ throws Exception - an exception */
@ Override public final synchronized void handleNewAccountEntry ( final Map < String , Object > pAddParam , final Account pAcc , final Long pSubaccId , final Date pDateAt ) throws Exception { } } | if ( lazyGetBalanceAtAllDirtyCheck ( pAddParam ) . getLeastAccountingEntryDate ( ) . getTime ( ) > pDateAt . getTime ( ) ) { boolean isDbgSh = getLogger ( ) . getDbgSh ( this . getClass ( ) ) && getLogger ( ) . getDbgFl ( ) < 11011 && getLogger ( ) . getDbgCl ( ) > 11013 ; if ( isDbgSh ) { getLogger ( ) . debug ( null , SrvBalanceStd . class , "changing least last entry date from " + this . balanceAtAllDirtyCheck . getLeastAccountingEntryDate ( ) + " to " + pDateAt ) ; } this . balanceAtAllDirtyCheck . setLeastAccountingEntryDate ( pDateAt ) ; getSrvOrm ( ) . updateEntity ( pAddParam , this . balanceAtAllDirtyCheck ) ; } |
public class PrettyTime { /** * Removes the mapping for the given { @ link TimeUnit } . This effectively de - registers the { @ link TimeUnit } so it will
* not be used in formatting . Returns the { @ link TimeFormat } that was removed , or null if no such unit was
* registered . */
public TimeFormat removeUnit ( final TimeUnit unit ) { } } | if ( unit == null ) return null ; cachedUnits = null ; return units . remove ( unit ) ; |
public class Validate { /** * < p > Validate that the specified argument character sequence matches the specified regular
* expression pattern ; otherwise throwing an exception with the specified message . < / p >
* < pre > Validate . matchesPattern ( " hi " , " [ a - z ] * " , " % s does not match % s " , " hi " " [ a - z ] * " ) ; < / pre >
* < p > The syntax of the pattern is the one used in the { @ link Pattern } class . < / p >
* @ param input the character sequence to validate , not null
* @ param pattern the regular expression pattern , not null
* @ param message the { @ link String # format ( String , Object . . . ) } exception message if invalid , not null
* @ param values the optional values for the formatted exception message , null array not recommended
* @ throws IllegalArgumentException if the character sequence does not match the pattern
* @ see # matchesPattern ( CharSequence , String )
* @ since 3.0 */
public static void matchesPattern ( final CharSequence input , final String pattern , final String message , final Object ... values ) { } } | // TODO when breaking BC , consider returning input
if ( input == null || ! input . toString ( ) . matches ( pattern ) ) { throw new IllegalArgumentException ( StringUtils . simpleFormat ( message , values ) ) ; } |
public class _ArrayMap {
    /**
     * Returns an iterator over the keys of a flat key/value array, where keys occupy
     * the even indices ({@code array[0], array[2], ...}) and values the odd ones.
     * Keys are yielded in reverse order (last key first), matching the original
     * back-to-front traversal.
     *
     * @param array the flat key/value array, or null
     * @return an iterator over the keys, or null if {@code array} is null
     */
    public static Iterator<Object> getKeys(Object[] array) {
        if (array == null) {
            return null;
        }
        List<Object> keys = new ArrayList<Object>();
        // Walk backwards over the key slots, stepping two at a time.
        for (int idx = array.length - 2; idx >= 0; idx -= 2) {
            keys.add(array[idx]);
        }
        return keys.iterator();
    }
}
public class CmsPermissionView { /** * Generates the permissions data container . < p >
* @ param permissions the permission set
* @ return the container */
IndexedContainer getPermissionContainer ( CmsPermissionSet permissions ) { } } | IndexedContainer result = new IndexedContainer ( ) ; result . addContainerProperty ( PROPERTY_LABEL , String . class , "" ) ; result . addContainerProperty ( PROPERTY_VALUE , Integer . class , null ) ; result . addContainerProperty ( PROPERTY_ALLOWED , Boolean . class , Boolean . FALSE ) ; result . addContainerProperty ( PROPERTY_DISPLAY_ALLOWED , Label . class , null ) ; result . addContainerProperty ( PROPERTY_DENIED , Boolean . class , Boolean . FALSE ) ; result . addContainerProperty ( PROPERTY_DISPLAY_DENIED , Label . class , null ) ; for ( String key : CmsPermissionSet . getPermissionKeys ( ) ) { int flag = CmsPermissionSet . getPermissionValue ( key ) ; Item entry = result . addItem ( key ) ; entry . getItemProperty ( PROPERTY_LABEL ) . setValue ( CmsVaadinUtils . getMessageText ( key ) ) ; entry . getItemProperty ( PROPERTY_ALLOWED ) . setValue ( isAllowed ( permissions , flag ) ) ; entry . getItemProperty ( PROPERTY_DISPLAY_ALLOWED ) . setValue ( getCheckBoxLabel ( isAllowed ( permissions , flag ) ) ) ; entry . getItemProperty ( PROPERTY_DENIED ) . setValue ( isDenied ( permissions , flag ) ) ; entry . getItemProperty ( PROPERTY_DISPLAY_DENIED ) . setValue ( getCheckBoxLabel ( isDenied ( permissions , flag ) ) ) ; entry . getItemProperty ( PROPERTY_VALUE ) . setValue ( Integer . valueOf ( flag ) ) ; } return result ; |
public class ByteBufferUtils {
    /**
     * Splits the specified byte array into an array of {@link ByteBuffer}s, each with
     * the specified maximum size. This is useful, for example, when trying to limit
     * sizes to a network MTU. The buffers wrap the original array; no data is copied.
     *
     * @param byteArray The array to segment.
     * @param sizeLimit The maximum size of each byte buffer.
     * @return The new array of {@link ByteBuffer}s.
     */
    public static ByteBuffer[] toArray(final byte[] byteArray, final int sizeLimit) {
        final int total = byteArray.length;
        // Integer ceiling of total / sizeLimit.
        final int bufferCount = total / sizeLimit + (total % sizeLimit == 0 ? 0 : 1);
        final ByteBuffer[] segments = new ByteBuffer[bufferCount];
        for (int i = 0; i < bufferCount; i++) {
            final int offset = i * sizeLimit;
            // Last segment may be shorter than the limit.
            final int segmentLength = Math.min(sizeLimit, total - offset);
            segments[i] = ByteBuffer.wrap(byteArray, offset, segmentLength);
        }
        return segments;
    }
}
public class BaseStrategy { /** * if this is a has ( property ) step , returns the property key , otherwise returns null
* @ param currentStep the step
* @ return the property which should be not null */
private String isNotNullStep ( Step < ? , ? > currentStep ) { } } | if ( currentStep instanceof TraversalFilterStep < ? > ) { TraversalFilterStep < ? > tfs = ( TraversalFilterStep < ? > ) currentStep ; List < ? > c = tfs . getLocalChildren ( ) ; if ( c != null && c . size ( ) == 1 ) { Traversal . Admin < ? , ? > a = ( Traversal . Admin < ? , ? > ) c . iterator ( ) . next ( ) ; Step < ? , ? > s = a . getEndStep ( ) ; if ( a . getSteps ( ) . size ( ) == 1 && s instanceof PropertiesStep < ? > ) { PropertiesStep < ? > ps = ( PropertiesStep < ? > ) s ; String [ ] keys = ps . getPropertyKeys ( ) ; if ( keys != null && keys . length == 1 ) { return keys [ 0 ] ; } } } } return null ; |
public class EventSubscriptionManager { /** * @ return the message start event subscriptions with the given message name ( from any tenant )
* @ see # findMessageStartEventSubscriptionByNameAndTenantId ( String , String ) */
@ SuppressWarnings ( "unchecked" ) public List < EventSubscriptionEntity > findMessageStartEventSubscriptionByName ( String messageName ) { } } | return getDbEntityManager ( ) . selectList ( "selectMessageStartEventSubscriptionByName" , configureParameterizedQuery ( messageName ) ) ; |
public class EqualizeHistTransform { /** * Takes an image and returns a transformed image .
* Uses the random object in the case of random transformations .
* @ param image to transform , null = = end of stream
* @ param random object to use ( or null for deterministic )
* @ return transformed image */
@ Override protected ImageWritable doTransform ( ImageWritable image , Random random ) { } } | if ( image == null ) { return null ; } Mat mat = ( Mat ) converter . convert ( image . getFrame ( ) ) ; Mat result = new Mat ( ) ; try { if ( mat . channels ( ) == 1 ) { equalizeHist ( mat , result ) ; } else { split ( mat , splitChannels ) ; equalizeHist ( splitChannels . get ( 0 ) , splitChannels . get ( 0 ) ) ; // equalize histogram on the 1st channel ( Y )
merge ( splitChannels , result ) ; } } catch ( Exception e ) { throw new RuntimeException ( e ) ; } return new ImageWritable ( converter . convert ( result ) ) ; |
public class V1LoggerModel { /** * { @ inheritDoc } */
@ Override public LoggerModel setType ( LoggerType type ) { } } | String t = type != null ? type . name ( ) : null ; setModelAttribute ( "type" , t ) ; return this ; |
public class DefaultDisseminatorImpl {
    /**
     * Returns an HTML rendering of the Dissemination Index for the object. The
     * Dissemination Index is a list of method definitions that represent all
     * disseminations possible on the object. The Dissemination Index is
     * returned as HTML in a presentation-oriented format. This is accomplished
     * by doing an XSLT transform on the XML that is obtained from listMethods
     * in API-A.
     *
     * @return html packaged as a MIMETypedStream
     * @throws ServerException if listing methods or the XSLT transform fails
     */
    public MIMETypedStream viewMethodIndex() throws ServerException {
        // sdp: the dissemination index is disabled for service definition and deployment objects
        // so send back a message saying so.
        if (reader.hasContentModel(Models.SERVICE_DEFINITION_3_0)
                || reader.hasContentModel(Models.SERVICE_DEPLOYMENT_3_0)) {
            return noMethodIndexMsg();
        }
        // get xml expression of method definitions
        ObjectMethodsDef[] methods = m_access.listMethods(context, reader.GetObjectPID(), asOfDateTime);
        ReadableCharArrayWriter buffer = new ReadableCharArrayWriter(1024);
        ObjectInfoAsXML.getMethodIndex(reposBaseURL, reader.GetObjectPID(), methods, asOfDateTime, buffer);
        buffer.close();
        Reader in = buffer.toReader();
        // transform the method definitions xml to an html view
        try {
            ReadableByteArrayOutputStream bytes = new ReadableByteArrayOutputStream(2048);
            PrintWriter out = new PrintWriter(new OutputStreamWriter(bytes, Charset.forName("UTF-8")));
            // Stylesheet lives under the repository home directory.
            File xslFile = new File(reposHomeDir, "access/listMethods.xslt");
            Templates template = XmlTransformUtility.getTemplates(xslFile);
            Transformer transformer = template.newTransformer();
            transformer.setParameter("fedora", context.getEnvironmentValue(Constants.FEDORA_APP_CONTEXT_NAME));
            transformer.transform(new StreamSource(in), new StreamResult(out));
            out.close();
            return new MIMETypedStream("text/html", bytes.toInputStream(), null, bytes.length());
        } catch (Exception e) {
            // NOTE(review): the original exception is not chained as a cause — only its
            // message is preserved; confirm whether DisseminationException accepts a cause.
            throw new DisseminationException("[DefaultDisseminatorImpl] had an error "
                    + "in transforming xml for viewItemIndex. "
                    + "Underlying exception was: " + e.getMessage());
        }
    }
}
public class Boxing {
    /**
     * Transforms any array into an array of {@code Double}.
     * Delegates to the sibling {@code boxDoubles} overload after converting
     * {@code src} via {@code array(...)}.
     * NOTE(review): assumes {@code array(src)} yields an array type accepted by the
     * target overload — TODO confirm against the overload set.
     *
     * @param src source array
     * @param srcPos start position
     * @param len length
     * @return Double array
     */
    public static Double[] boxDoubles(Object src, int srcPos, int len) {
        return boxDoubles(array(src), srcPos, len);
    }
}
public class A_CmsEditUserGroupRoleDialog { /** * Check if table exists in given layout . Removes all tables . < p >
* @ param layout to be cleaned from tables */
private void removeExistingTable ( VerticalLayout layout ) { } } | List < Component > tobeRemoved = new ArrayList < Component > ( ) ; Iterator < Component > it = layout . iterator ( ) ; while ( it . hasNext ( ) ) { Component comp = it . next ( ) ; if ( ( comp instanceof FixedHeightPanel ) | ( comp instanceof TextField ) | ( comp instanceof VerticalLayout ) ) { tobeRemoved . add ( comp ) ; } } for ( Component c : tobeRemoved ) { layout . removeComponent ( c ) ; } |
public class MtasSolrSearchComponent { /** * ( non - Javadoc )
* @ see org . apache . solr . handler . component . SearchComponent # init ( org . apache . solr .
* common . util . NamedList ) */
@ Override public void init ( NamedList args ) { } } | super . init ( args ) ; // init components
searchStatus = new MtasSolrComponentStatus ( this ) ; searchVersion = new MtasSolrComponentVersion ( this ) ; searchDocument = new MtasSolrComponentDocument ( this ) ; searchKwic = new MtasSolrComponentKwic ( this ) ; searchList = new MtasSolrComponentList ( this ) ; searchGroup = new MtasSolrComponentGroup ( this ) ; searchTermvector = new MtasSolrComponentTermvector ( this ) ; searchPrefix = new MtasSolrComponentPrefix ( this ) ; searchStats = new MtasSolrComponentStats ( this ) ; searchFacet = new MtasSolrComponentFacet ( this ) ; searchCollection = new MtasSolrComponentCollection ( this ) ; // init collection
String collectionCacheDirectory = null ; Long collectionLifetime = null ; Integer collectionMaximumNumber = null ; Integer collectionMaximumOverflow = null ; if ( args . get ( CONFIG_COLLECTION_CACHE_DIRECTORY ) != null && args . get ( CONFIG_COLLECTION_CACHE_DIRECTORY ) instanceof String ) { collectionCacheDirectory = ( String ) args . get ( CONFIG_COLLECTION_CACHE_DIRECTORY ) ; } else { log . error ( "no " + CONFIG_COLLECTION_CACHE_DIRECTORY + " defined for " + this . getClass ( ) . getSimpleName ( ) ) ; } if ( args . get ( CONFIG_COLLECTION_LIFETIME ) != null && args . get ( CONFIG_COLLECTION_LIFETIME ) instanceof Long ) { collectionLifetime = ( Long ) args . get ( CONFIG_COLLECTION_LIFETIME ) ; } else { log . error ( "no " + CONFIG_COLLECTION_LIFETIME + " defined for " + this . getClass ( ) . getSimpleName ( ) ) ; } if ( args . get ( CONFIG_COLLECTION_MAXIMUM_NUMBER ) != null && args . get ( CONFIG_COLLECTION_MAXIMUM_NUMBER ) instanceof Integer ) { collectionMaximumNumber = ( Integer ) args . get ( CONFIG_COLLECTION_MAXIMUM_NUMBER ) ; } else { log . error ( "no " + CONFIG_COLLECTION_MAXIMUM_NUMBER + " defined for " + this . getClass ( ) . getSimpleName ( ) ) ; } if ( args . get ( CONFIG_COLLECTION_MAXIMUM_OVERFLOW ) != null && args . get ( CONFIG_COLLECTION_MAXIMUM_OVERFLOW ) instanceof Integer ) { collectionMaximumNumber = ( Integer ) args . get ( CONFIG_COLLECTION_MAXIMUM_OVERFLOW ) ; } else { log . error ( "no " + CONFIG_COLLECTION_MAXIMUM_OVERFLOW + " defined for " + this . getClass ( ) . getSimpleName ( ) ) ; } collectionCache = new MtasSolrCollectionCache ( collectionCacheDirectory , collectionLifetime , collectionMaximumNumber , collectionMaximumOverflow ) ; |
public class AbstractAmazonSNSAsync { /** * Simplified method form for invoking the CreateTopic operation with an AsyncHandler .
* @ see # createTopicAsync ( CreateTopicRequest , com . amazonaws . handlers . AsyncHandler ) */
@ Override public java . util . concurrent . Future < CreateTopicResult > createTopicAsync ( String name , com . amazonaws . handlers . AsyncHandler < CreateTopicRequest , CreateTopicResult > asyncHandler ) { } } | return createTopicAsync ( new CreateTopicRequest ( ) . withName ( name ) , asyncHandler ) ; |
public class LocalClientFactory { /** * Creates a local client from a configuration map .
* @ param config the config from apiman . properties
* @ param indexName the name of the ES index
* @ param defaultIndexName the default ES index name
* @ return the ES client */
public JestClient createLocalClient ( Map < String , String > config , String indexName , String defaultIndexName ) { } } | String clientLocClassName = config . get ( "client.class" ) ; // $ NON - NLS - 1 $
String clientLocFieldName = config . get ( "client.field" ) ; // $ NON - NLS - 1 $
return createLocalClient ( clientLocClassName , clientLocFieldName , indexName , defaultIndexName ) ; |
public class App { /** * Opens a new tab , and have it selected . The page provided will be loaded
* @ param url - the url to load once the new tab is opened and selected */
public void openNewWindow ( String url ) { } } | String action = "Opening new window to url " + url ; String expected = "New window is opened to url " + url ; try { JavascriptExecutor jse = ( JavascriptExecutor ) driver ; jse . executeScript ( "window.open('" + url + "','_blank');" ) ; } catch ( Exception e ) { reporter . fail ( action , expected , "Unable to open window tab. " + e . getMessage ( ) ) ; log . warn ( e ) ; return ; } switchToNewWindow ( ) ; waitFor ( ) . urlEquals ( url ) ; if ( ! get ( ) . url ( ) . equals ( url ) ) { reporter . fail ( action , expected , "Unable to open new window to " + url ) ; return ; } reporter . pass ( action , expected , expected ) ; acceptCertificate ( ) ; |
public class AbstractMonteCarloProduct { /** * / * ( non - Javadoc )
* @ see net . finmath . montecarlo . MonteCarloProduct # getValues ( double , net . finmath . montecarlo . MonteCarloSimulationModel ) */
@ Override public Map < String , Object > getValues ( double evaluationTime , MonteCarloSimulationModel model ) throws CalculationException { } } | RandomVariable values = getValue ( evaluationTime , model ) ; if ( values == null ) { return null ; } // Sum up values on path
double value = values . getAverage ( ) ; double error = values . getStandardError ( ) ; Map < String , Object > results = new HashMap < > ( ) ; results . put ( "value" , value ) ; results . put ( "error" , error ) ; return results ; |
public class StrategyEvaluator {
    /**
     * Evaluate supplier strategies that produce Optionals until one returns something.
     * This implements the strategy pattern with a simple varargs of suppliers that
     * produce an Optional T. Suppliers after the first successful one are not invoked.
     *
     * @param suppliers the strategies that produce Optionals of the desired output type
     * @param <T> type
     * @return the optional provided by the first successful supplier or Optional.empty()
     */
    @SafeVarargs
    public static <T> Optional<T> evaluate(Supplier<Optional<T>>... suppliers) {
        Optional<T> result = Optional.empty();
        for (Supplier<Optional<T>> strategy : suppliers) {
            result = strategy.get();
            if (result.isPresent()) {
                break;  // first success wins; remaining strategies are skipped
            }
        }
        return result;
    }
}
public class vpnclientlessaccesspolicy { /** * Use this API to fetch vpnclientlessaccesspolicy resource of given name . */
public static vpnclientlessaccesspolicy get ( nitro_service service , String name ) throws Exception { } } | vpnclientlessaccesspolicy obj = new vpnclientlessaccesspolicy ( ) ; obj . set_name ( name ) ; vpnclientlessaccesspolicy response = ( vpnclientlessaccesspolicy ) obj . get_resource ( service ) ; return response ; |
public class SegmentAggregator { /** * Validates that the given StorageOperation can be processed , given the current accumulated state of the Segment .
* @ param operation The operation to check .
* @ throws DataCorruptionException If any of the validations failed .
* @ throws IllegalArgumentException If the operation has an undefined Offset or Length ( these are not considered data -
* corrupting issues ) . */
private void checkValidStorageOperation ( StorageOperation operation ) throws DataCorruptionException { } } | // StreamSegmentAppendOperations need to be pre - processed into CachedStreamSegmentAppendOperations .
Preconditions . checkArgument ( ! ( operation instanceof StreamSegmentAppendOperation ) , "SegmentAggregator cannot process StreamSegmentAppendOperations." ) ; // Verify operation offset against the lastAddedOffset ( whether the last Op in the list or StorageLength ) .
long offset = operation . getStreamSegmentOffset ( ) ; long length = operation . getLength ( ) ; Preconditions . checkArgument ( offset >= 0 , "Operation '%s' has an invalid offset (%s)." , operation , operation . getStreamSegmentOffset ( ) ) ; Preconditions . checkArgument ( length >= 0 , "Operation '%s' has an invalid length (%s)." , operation , operation . getLength ( ) ) ; // Check that operations are contiguous ( only for the operations after the first one - as we initialize lastAddedOffset on the first op ) .
if ( isTruncateOperation ( operation ) ) { if ( this . metadata . getStartOffset ( ) < operation . getStreamSegmentOffset ( ) ) { throw new DataCorruptionException ( String . format ( "StreamSegmentTruncateOperation '%s' has a truncation offset beyond the one in the Segment's Metadata. Expected: at most %d, actual: %d." , operation , this . metadata . getStartOffset ( ) , offset ) ) ; } } else { long lastOffset = this . lastAddedOffset . get ( ) ; if ( lastOffset >= 0 && offset != lastOffset ) { throw new DataCorruptionException ( String . format ( "Wrong offset for Operation '%s'. Expected: %s, actual: %d." , operation , this . lastAddedOffset , offset ) ) ; } } // Check that the operation does not exceed the Length of the StreamSegment .
if ( offset + length > this . metadata . getLength ( ) ) { throw new DataCorruptionException ( String . format ( "Operation '%s' has at least one byte beyond its Length. Offset = %d, Length = %d, Length = %d." , operation , offset , length , this . metadata . getLength ( ) ) ) ; } if ( operation instanceof StreamSegmentSealOperation ) { // For StreamSegmentSealOperations , we must ensure the offset of the operation is equal to the Length for the segment .
if ( this . metadata . getLength ( ) != offset ) { throw new DataCorruptionException ( String . format ( "Wrong offset for Operation '%s'. Expected: %d (Length), actual: %d." , operation , this . metadata . getLength ( ) , offset ) ) ; } // Even though not an offset , we should still verify that the metadata actually thinks this is a sealed segment .
if ( ! this . metadata . isSealed ( ) ) { throw new DataCorruptionException ( String . format ( "Received Operation '%s' for a non-sealed segment." , operation ) ) ; } } |
public class ByteBufUtil {
    /**
     * Encode a {@link CharSequence} in <a href="http://en.wikipedia.org/wiki/ASCII">ASCII</a>
     * and write it to a {@link ByteBuf}.
     * This method returns the actual number of bytes written.
     */
    public static int writeAscii(ByteBuf buf, CharSequence seq) {
        // ASCII uses 1 byte per char
        final int len = seq.length();
        if (seq instanceof AsciiString) {
            // Fast path: AsciiString exposes its backing array directly.
            AsciiString asciiString = (AsciiString) seq;
            buf.writeBytes(asciiString.array(), asciiString.arrayOffset(), len);
        } else {
            // Unwrap until we reach a buffer type we can write to efficiently.
            for (;;) {
                if (buf instanceof WrappedCompositeByteBuf) {
                    // WrappedCompositeByteBuf is a sub-class of AbstractByteBuf so it needs special handling.
                    buf = buf.unwrap();
                } else if (buf instanceof AbstractByteBuf) {
                    // Fast path: write chars directly at writerIndex, then advance it.
                    AbstractByteBuf byteBuf = (AbstractByteBuf) buf;
                    byteBuf.ensureWritable0(len);
                    int written = writeAscii(byteBuf, byteBuf.writerIndex, seq, len);
                    byteBuf.writerIndex += written;
                    return written;
                } else if (buf instanceof WrappedByteBuf) {
                    // Unwrap as the wrapped buffer may be an AbstractByteBuf and so we can use fast-path.
                    buf = buf.unwrap();
                } else {
                    // Slow path: materialize the ASCII bytes and write them in one go.
                    byte[] bytes = seq.toString().getBytes(CharsetUtil.US_ASCII);
                    buf.writeBytes(bytes);
                    return bytes.length;
                }
            }
        }
        return len;
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.