signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class LocalTime { /** * Returns a copy of this time minus the specified number of seconds . * This LocalTime instance is immutable and unaffected by this method call . * The following three lines are identical in effect : * < pre > * LocalTime subtracted = dt . minusSeconds ( 6 ) ; * LocalTime subtracted = dt . minus ( Period . seconds ( 6 ) ) ; * LocalTime subtracted = dt . withFieldAdded ( DurationFieldType . seconds ( ) , - 6 ) ; * < / pre > * @ param seconds the amount of seconds to subtract , may be negative * @ return the new LocalTime minus the increased seconds */ public LocalTime minusSeconds ( int seconds ) { } }
if ( seconds == 0 ) { return this ; } long instant = getChronology ( ) . seconds ( ) . subtract ( getLocalMillis ( ) , seconds ) ; return withLocalMillis ( instant ) ;
public class FileUtil {
    /**
     * Creates a directory, recursively creating any missing parent directories.
     * If the directory already exists, it is returned as-is.
     * <br>
     * This method does not check the type of an existing {@code File}; if the
     * path exists but is not a directory, its type cannot be determined here.
     *
     * @param dir the directory to create, may be {@code null}
     * @return the created (or pre-existing) directory, or {@code null} when
     *         {@code dir} was {@code null}
     */
    public static File mkdir(File dir) {
        if (dir == null) {
            return null;
        }
        // Idiomatic negation instead of the original "false == dir.exists()".
        if (!dir.exists()) {
            // mkdirs() creates all missing parents; its boolean result is
            // deliberately ignored to preserve the original best-effort contract.
            dir.mkdirs();
        }
        return dir;
    }
}
public class DateTimeUtil {
    /**
     * Turns a Date into the form "yyyyMMddTHHmmss".
     *
     * @param val the date to format
     * @return the formatted String "yyyyMMddTHHmmss"
     */
    public static String isoDateTime(final Date val) {
        // isoDateTimeFormat is a shared formatter (presumably a SimpleDateFormat,
        // which is not thread-safe - TODO confirm), so all access is serialized on it.
        synchronized (isoDateTimeFormat) {
            try {
                // Re-apply the default time zone on every call; it may have changed
                // since the formatter was created.
                isoDateTimeFormat.setTimeZone(Timezones.getDefaultTz());
            } catch (TimezonesException tze) {
                // Time-zone lookup failure is unrecoverable here - surface as unchecked.
                throw new RuntimeException(tze);
            }
            return isoDateTimeFormat.format(val);
        }
    }
}
public class AmazonDirectConnectClient { /** * Associates an existing connection with a link aggregation group ( LAG ) . The connection is interrupted and * re - established as a member of the LAG ( connectivity to AWS is interrupted ) . The connection must be hosted on the * same AWS Direct Connect endpoint as the LAG , and its bandwidth must match the bandwidth for the LAG . You can * re - associate a connection that ' s currently associated with a different LAG ; however , if removing the connection * would cause the original LAG to fall below its setting for minimum number of operational connections , the request * fails . * Any virtual interfaces that are directly associated with the connection are automatically re - associated with the * LAG . If the connection was originally associated with a different LAG , the virtual interfaces remain associated * with the original LAG . * For interconnects , any hosted connections are automatically re - associated with the LAG . If the interconnect was * originally associated with a different LAG , the hosted connections remain associated with the original LAG . * @ param associateConnectionWithLagRequest * @ return Result of the AssociateConnectionWithLag operation returned by the service . * @ throws DirectConnectServerException * A server - side error occurred . * @ throws DirectConnectClientException * One or more parameters are not valid . * @ sample AmazonDirectConnect . AssociateConnectionWithLag * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / directconnect - 2012-10-25 / AssociateConnectionWithLag " * target = " _ top " > AWS API Documentation < / a > */ @ Override public AssociateConnectionWithLagResult associateConnectionWithLag ( AssociateConnectionWithLagRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeAssociateConnectionWithLag ( request ) ;
public class UnicodeEmojiImpl { /** * Gets a unicode emoji by its string representation . * @ param emoji The emoji string . * @ return The object , representing the emoji from the given string . */ public static UnicodeEmojiImpl fromString ( String emoji ) { } }
return unicodeEmojis . computeIfAbsent ( emoji , key -> new UnicodeEmojiImpl ( emoji ) ) ;
public class Journaler { /** * Check the parameters for required values and for acceptable values . At * this point , the only parameter we care about is " mode " . */ private void parseParameters ( Map < String , String > parameters ) throws ModuleInitializationException { } }
logger . info ( "Parameters: " + parameters ) ; String mode = parameters . get ( PARAMETER_JOURNAL_MODE ) ; if ( mode == null ) { inRecoveryMode = false ; } else if ( mode . equals ( VALUE_JOURNAL_MODE_NORMAL ) ) { inRecoveryMode = false ; } else if ( mode . equals ( VALUE_JOURNAL_MODE_RECOVER ) ) { inRecoveryMode = true ; } else { throw new ModuleInitializationException ( "'" + PARAMETER_JOURNAL_MODE + "' parameter must be '" + VALUE_JOURNAL_MODE_NORMAL + "'(default) or '" + VALUE_JOURNAL_MODE_RECOVER + "'" , getRole ( ) ) ; }
public class CPRulePersistenceImpl {
    /**
     * Returns a range of all the cp rules that the user has permission to view
     * where groupId = &#63;.
     * <p>
     * Useful when paginating results. Returns a maximum of <code>end - start</code>
     * instances. <code>start</code> and <code>end</code> are zero-based result-set
     * indexes, not primary keys; passing {@link QueryUtil#ALL_POS} for both returns
     * the full result set. Without an order-by comparator, paginated queries use the
     * default ORDER BY from {@link CPRuleModelImpl}; unpaginated queries skip
     * ORDER BY entirely and come back sorted by primary key ascending.
     *
     * @param groupId the group ID
     * @param start the lower bound of the range of cp rules
     * @param end the upper bound of the range of cp rules (not inclusive)
     * @return the range of matching cp rules that the user has permission to view
     */
    @Override
    public List<CPRule> filterFindByGroupId(long groupId, int start, int end) {
        // Delegate to the comparator-aware overload; null selects the default
        // ordering behavior documented above.
        return filterFindByGroupId(groupId, start, end, null);
    }
}
public class DaVinci {
    /**
     * When the bitmap has been downloaded, load it into the [into] object.
     * The delivery always happens on the Android main (UI) thread.
     *
     * @param bitmap the downloaded bitmap
     * @param path the image source path (path or url)
     * @param into the destination object; an ImageView or a Callback is handled,
     *             anything else is silently ignored
     */
    private static void returnBitmapInto(final Bitmap bitmap, final String path, final Object into) {
        // Post to the main looper: ImageView mutation must happen on the UI thread.
        new Handler(Looper.getMainLooper()).post(new Runnable() {
            @Override
            public void run() {
                // All three values must be present; otherwise drop the result silently.
                if (into != null && path != null && bitmap != null) {
                    if (into instanceof ImageView) {
                        Log.d(TAG, "return bitmap " + path + " into ImageView");
                        ((ImageView) into).setImageBitmap(bitmap);
                    } else if (into instanceof Callback) {
                        Log.d(TAG, "return bitmap " + path + " into Callback");
                        ((Callback) into).onBitmapLoaded(path, bitmap);
                    }
                }
            }
        });
    }
}
public class UnnecessaryStoreBeforeReturn {
    /**
     * Implements the visitor to look for a store of a register immediately
     * followed by a load and return of that same register - an unnecessary
     * store. Driven as a three-state machine: SEEN_NOTHING -> SEEN_STORE ->
     * SEEN_LOAD, reporting a bug when a return follows the load.
     *
     * @param seen the opcode of the currently parsed instruction
     */
    @Override
    public void sawOpcode(int seen) {
        // Register assigned by a binary-op result this opcode, if any; -1 means none.
        int lhsReg = -1;
        try {
            stack.precomputation(this);
            switch (state) {
            case SEEN_NOTHING:
                // A store is only interesting outside catch targets, and only
                // when the stored value did not originate from the same register.
                if (!catchTargets.get(getPC()) && lookForStore(seen) && (stack.getStackDepth() >= 1)) {
                    OpcodeStack.Item item = stack.getStackItem(0);
                    Integer reg = (Integer) item.getUserValue();
                    if ((reg == null) || (reg.intValue() != storeReg)) {
                        state = State.SEEN_STORE;
                    }
                }
                break;
            case SEEN_STORE:
                // A branch target between store and load invalidates the pattern.
                if (branchTargets.get(getPC())) {
                    state = State.SEEN_NOTHING;
                    break;
                }
                state = lookForLoad(seen) ? State.SEEN_LOAD : State.SEEN_NOTHING;
                break;
            case SEEN_LOAD:
                // store/load/return of the same register: report the bug.
                if ((seen >= Const.IRETURN) && (seen <= Const.ARETURN)) {
                    bugReporter.reportBug(new BugInstance(this, BugType.USBR_UNNECESSARY_STORE_BEFORE_RETURN.name(), NORMAL_PRIORITY).addClass(this).addMethod(this).addSourceLine(this));
                }
                state = State.SEEN_NOTHING;
                break;
            }
            // Record branch targets so the SEEN_STORE state can reject them later.
            if (branchInstructions.get(seen)) {
                branchTargets.set(getBranchTarget());
            }
            lhsReg = processBinOp(seen);
        } finally {
            // Keep the opcode stack in sync regardless of early exits above.
            TernaryPatcher.pre(stack, seen);
            stack.sawOpcode(this, seen);
            TernaryPatcher.post(stack, seen);
            // Tag the top-of-stack item with the register its value came from,
            // for the SEEN_NOTHING check on the next opcode.
            if ((lhsReg > -1) && (stack.getStackDepth() >= 1)) {
                OpcodeStack.Item item = stack.getStackItem(0);
                item.setUserValue(Integer.valueOf(lhsReg));
            }
        }
    }
}
public class ElementWithOptions { /** * Returns all the options in this element , in list order . * @ param includeDisabled Indicates if disabled options should be included in the list . * If a group itself is disabled , its options are also considered as disabled . * @ return List of options */ public List < Option > getOptions ( boolean includeDisabled ) { } }
List < Option > result = new ArrayList < > ( ) ; for ( OptionGroup group : optionGroups . values ( ) ) { result . addAll ( group . getOptions ( includeDisabled ) ) ; } return result ;
public class StringUtil {
    /**
     * Returns a string with whitespace stripped from the beginning of str.
     * Any character with a code point of {@code ' '} (U+0020) or below is
     * treated as whitespace, mirroring {@link String#trim()}.
     *
     * @param str String to clean
     * @param defaultValue value returned when {@code str} is {@code null}
     * @return cleaned String, or {@code defaultValue} for {@code null} input
     */
    public static String ltrim(String str, String defaultValue) {
        if (str == null) {
            return defaultValue;
        }
        final int n = str.length();
        int pos = 0;
        // Advance past every leading control/space character.
        while (pos < n && str.charAt(pos) <= ' ') {
            pos++;
        }
        // Avoid allocating a new string when nothing was trimmed.
        return pos > 0 ? str.substring(pos) : str;
    }
}
public class ChatBalloon { /** * 获取已设置了的padding值 */ private int [ ] gotExistedPadding ( ) { } }
int [ ] ret = new int [ 4 ] ; ret [ 0 ] = getPaddingLeft ( ) ; if ( 0 == ret [ 0 ] ) { ret [ 0 ] = cornerSizeLeftTop > cornerSizeLeftBottom ? cornerSizeLeftTop : cornerSizeLeftBottom ; } ret [ 1 ] = getPaddingTop ( ) ; if ( 0 == ret [ 1 ] ) { ret [ 1 ] = cornerSizeLeftTop > cornerSizeRightTop ? cornerSizeLeftTop : cornerSizeRightTop ; } ret [ 2 ] = getPaddingRight ( ) ; if ( 0 == ret [ 2 ] ) { ret [ 2 ] = cornerSizeRightTop > cornerSizeRightBottom ? cornerSizeRightTop : cornerSizeRightBottom ; } ret [ 3 ] = getPaddingBottom ( ) ; if ( 0 == ret [ 3 ] ) { ret [ 3 ] = cornerSizeLeftBottom > cornerSizeRightBottom ? cornerSizeLeftBottom : cornerSizeRightBottom ; } return ret ;
public class AfplibFactoryImpl {
    /**
     * Converts the string form of an {@code IOBObjType} literal into the
     * corresponding enumerator, rejecting unknown values.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public IOBObjType createIOBObjTypeFromString(EDataType eDataType, String initialValue) {
        // IOBObjType.get returns null for an unrecognized literal.
        IOBObjType result = IOBObjType.get(initialValue);
        if (result == null) throw new IllegalArgumentException("The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
        return result;
    }
}
public class CleverTapAPI { /** * This method is used to change the credentials of CleverTap account Id , token and region programmatically * @ param accountID CleverTap Account Id * @ param token CleverTap Account Token * @ param region Clever Tap Account Region */ @ SuppressWarnings ( { } }
"unused" , "WeakerAccess" } ) public static void changeCredentials ( String accountID , String token , String region ) { if ( defaultConfig != null ) { Logger . i ( "CleverTap SDK already initialized with accountID:" + defaultConfig . getAccountId ( ) + " and token:" + defaultConfig . getAccountToken ( ) + ". Cannot change credentials to " + accountID + " and " + token ) ; return ; } ManifestInfo . changeCredentials ( accountID , token , region ) ;
public class RxSharedPreferences {
    /**
     * Create a boolean preference for {@code key}. Default is {@code false}.
     */
    @CheckResult
    @NonNull
    public Preference<Boolean> getBoolean(@NonNull String key) {
        // Delegate to the overload with an explicit default value.
        return getBoolean(key, DEFAULT_BOOLEAN);
    }
}
public class JobSupervisor {
    /**
     * Cancels this job: unregisters the trackable job future, cancels any
     * locally running map/combine and reducer tasks, and destroys this
     * supervisor via the map-reduce service.
     *
     * @return the unregistered {@link TrackableJobFuture} for the job, which
     *         may be {@code null} if no trackable job was registered
     */
    public TrackableJobFuture cancel() {
        String jobId = getConfiguration().getJobId();
        TrackableJobFuture future = jobTracker.unregisterTrackableJob(jobId);
        // Stop the local map/combine phase, if one is running.
        MapCombineTask mapCombineTask = jobTracker.unregisterMapCombineTask(jobId);
        if (mapCombineTask != null) {
            mapCombineTask.cancel();
        }
        // Stop the local reducer phase, if one is running.
        ReducerTask reducerTask = jobTracker.unregisterReducerTask(jobId);
        if (reducerTask != null) {
            reducerTask.cancel();
        }
        // Tear down this supervisor's registration with the service.
        mapReduceService.destroyJobSupervisor(this);
        return future;
    }
}
public class JavaSourceUtils { /** * 合并类型声明的分发函数 */ @ SuppressWarnings ( "uncheck" ) public static TypeDeclaration mergeType ( TypeDeclaration one , TypeDeclaration two ) { } }
TypeDeclaration type = null ; if ( isAllNull ( one , two ) ) return null ; if ( isAllNotNull ( one , two ) ) { // just ignore when class type are not same if ( one . getClass ( ) . equals ( two . getClass ( ) ) ) { if ( one instanceof AnnotationDeclaration ) { type = mergeType ( ( AnnotationDeclaration ) one , ( AnnotationDeclaration ) two ) ; } else if ( one instanceof ClassOrInterfaceDeclaration ) { type = mergeType ( ( ClassOrInterfaceDeclaration ) one , ( ClassOrInterfaceDeclaration ) two ) ; } else if ( one instanceof EmptyTypeDeclaration ) { type = mergeType ( ( EmptyTypeDeclaration ) one , ( EmptyTypeDeclaration ) two ) ; } else if ( one instanceof EnumDeclaration ) { type = mergeType ( ( EnumDeclaration ) one , ( EnumDeclaration ) two ) ; } } } else { type = findFirstNotNull ( one , two ) ; LOG . info ( "add {} --> {}" , type . getClass ( ) . getSimpleName ( ) , type . getName ( ) ) ; } return type ;
public class DoubleStream {
    /**
     * Returns {@code DoubleStream} with elements that satisfy the given predicate.
     * <p>This is an intermediate operation.
     * <p>Example:
     * <pre>
     * predicate: (a) -&gt; a &gt; 2
     * stream: [1, 2, 3, 4, -8, 0, 11]
     * result: [3, 4, 11]
     * </pre>
     *
     * @param predicate the predicate used to filter elements
     * @return the new stream
     */
    @NotNull
    public DoubleStream filter(@NotNull final DoublePredicate predicate) {
        // Lazily wraps the current iterator; filtering happens on traversal.
        return new DoubleStream(params, new DoubleFilter(iterator, predicate));
    }
}
public class Protocol {
    /**
     * Convenience method which writes the output of an internally called
     * {@link Protocol#send(Object) <code>send(data)</code>} to a
     * {@link java.io.ObjectOutput}.
     *
     * @param data the user data to package and send
     * @param objectOutput the object output to write the packaged user-data to
     * @return the packet entry produced by {@code send(data)}
     * @throws IOException if there was an error writing to the <code>ObjectOutput</code>
     * @see Protocol#send(Object)
     */
    public synchronized Map.Entry<Short, Packet<T>> send(T data, ObjectOutput objectOutput) throws IOException {
        // Package the data first, then serialize the resulting packet.
        Map.Entry<Short, Packet<T>> packetEntry = send(data);
        Packet.<T>writeExternalStatic(packetEntry.getValue(), objectOutput);
        return packetEntry;
    }
}
public class MacSigner { /** * val keyLength = key . getEncoded . length * 8 */ public byte [ ] sign ( byte [ ] bytes ) { } }
try { Mac mac = Mac . getInstance ( algorithm ) ; mac . init ( key ) ; return mac . doFinal ( bytes ) ; } catch ( GeneralSecurityException e ) { throw new RuntimeException ( e ) ; }
public class WhileyFileParser { /** * Match a the end of a line . This is required to signal , for example , the * end of the current statement . */ private void matchEndLine ( ) { } }
// First , parse all whitespace characters except for new lines index = skipLineSpace ( index ) ; // Second , check whether we ' ve reached the end - of - file ( as signaled by // running out of tokens ) , or we ' ve encountered some token which not a // newline . if ( index >= tokens . size ( ) ) { return ; // EOF } else if ( tokens . get ( index ) . kind != NewLine ) { syntaxError ( "expected end-of-line" , tokens . get ( index ) ) ; } else { index = index + 1 ; }
public class UnconditionalValueDerefSet { /** * Get the set of dereference Locations for given value number . * @ param vn * the value number * @ return the set of dereference Locations */ public Set < Location > getDerefLocationSet ( ValueNumber vn ) { } }
Set < Location > derefLocationSet = derefLocationSetMap . get ( vn ) ; if ( derefLocationSet == null ) { derefLocationSet = new HashSet < > ( ) ; derefLocationSetMap . put ( vn , derefLocationSet ) ; } return derefLocationSet ;
public class MarcField { /** * A MARC field can be denoted by a key , independent of values . * This key is a string , consisting of tag , indicator , subfield IDs , delimited by a dollar sign . * @ return the key of this MARC field */ public String toKey ( ) { } }
return ( tag == null ? EMPTY_STRING : tag ) + KEY_DELIMITER + ( indicator == null ? EMPTY_STRING : indicator ) + KEY_DELIMITER + subfieldIds ;
public class ExtensionSpider { /** * Tells whether or not the given { @ code uri } is in scope . * @ param uri the uri that will be checked * @ return { @ code true } if the { @ code uri } is in scope , { @ code false } otherwise * @ since 2.5.0 * @ see Session # isInScope ( String ) */ protected boolean isTargetUriInScope ( String uri ) { } }
if ( uri == null ) { return false ; } return getModel ( ) . getSession ( ) . isInScope ( uri ) ;
public class responderpolicy_responderglobal_binding { /** * Use this API to fetch responderpolicy _ responderglobal _ binding resources of given name . */ public static responderpolicy_responderglobal_binding [ ] get ( nitro_service service , String name ) throws Exception { } }
responderpolicy_responderglobal_binding obj = new responderpolicy_responderglobal_binding ( ) ; obj . set_name ( name ) ; responderpolicy_responderglobal_binding response [ ] = ( responderpolicy_responderglobal_binding [ ] ) obj . get_resources ( service ) ; return response ;
public class Axis {
    /**
     * Sets this axis' data type, rejecting a change to a different non-null
     * type once one has been established (mixed types on one chart are invalid).
     *
     * @param dataType the series data type; null leaves any existing type in
     *        place only in the sense that no conflict is raised before assignment
     */
    public void setDataType(Series.DataType dataType) {
        // Only a real change between two non-null types is a conflict.
        if (dataType != null && this.dataType != null && this.dataType != dataType) {
            throw new IllegalArgumentException("Different Axes (e.g. Date, Number, String) cannot be mixed on the same chart!!");
        }
        this.dataType = dataType;
    }
}
public class Finalizer {
    /**
     * Cleans up a single reference, then drains any further references already
     * queued. Catches and logs all throwables from the referent finalization.
     *
     * @return true if the caller should continue, false if the associated
     *         FinalizableReferenceQueue is no longer referenced.
     */
    private boolean cleanUp(Reference<?> reference) {
        Method finalizeReferentMethod = getFinalizeReferentMethod();
        if (finalizeReferentMethod == null) {
            // The referent class is gone; nothing more can be finalized.
            return false;
        }
        do {
            /*
             * This is for the benefit of phantom references. Weak and soft
             * references will have already been cleared by this point.
             */
            reference.clear();
            if (reference == frqReference) {
                /*
                 * The client no longer has a reference to the
                 * FinalizableReferenceQueue. We can stop.
                 */
                return false;
            }
            try {
                finalizeReferentMethod.invoke(reference);
            } catch (Throwable t) {
                // Never let a bad finalizer kill the cleanup thread.
                logger.log(Level.SEVERE, "Error cleaning up after reference.", t);
            }
            /*
             * Loop as long as we have references available so as not to waste
             * CPU looking up the Method over and over again.
             */
        } while ((reference = queue.poll()) != null) ;
        return true;
    }
}
public class DocumentSettings {
    /**
     * Returns a private key from the given keystore.
     *
     * @param ks the keystore holding the key
     * @param alias the alias under which the key is stored
     * @param password the password protecting the key entry
     * @return the private key for the alias
     * @throws KeyStoreException if the keystore has not been initialized
     * @throws NoSuchAlgorithmException if the key recovery algorithm is unavailable
     * @throws UnrecoverableKeyException if the key cannot be recovered (e.g. wrong password)
     */
    protected PrivateKey getKey(KeyStore ks, String alias, char[] password) throws KeyStoreException, NoSuchAlgorithmException, UnrecoverableKeyException {
        // Thin delegation to the shared helper.
        return CertificateHelper.getKey(ks, alias, password);
    }
}
public class BlockingQueueReceiver { /** * A BlockingQueueAdaptor requires a MessageTransportListener to be set in order to adapt a client side . * @ param listener * is the MessageTransportListener to push messages to when they come in . */ @ SuppressWarnings ( { } }
"rawtypes" , "unchecked" } ) @ Override public synchronized void start ( final Listener listener , final Infrastructure infra ) { if ( listener == null ) throw new IllegalArgumentException ( "Cannot pass null to " + BlockingQueueReceiver . class . getSimpleName ( ) + ".setListener" ) ; if ( this . listener != null ) throw new IllegalStateException ( "Cannot set a new Listener (" + SafeString . objectDescription ( listener ) + ") on a " + BlockingQueueReceiver . class . getSimpleName ( ) + " when there's one already set (" + SafeString . objectDescription ( this . listener ) + ")" ) ; this . listener = listener ; infra . getThreadingModel ( ) . runDaemon ( this , "BQReceiver-" + address . toString ( ) ) ;
public class ValueHolder { /** * Add a PropertyChangeListener to the listener list . * @ param listener The listener to add . */ public void addPropertyChangeListener ( PropertyChangeListener listener ) { } }
if ( propertyChangeSupport == null ) { propertyChangeSupport = new PropertyChangeSupport ( this ) ; } propertyChangeSupport . addPropertyChangeListener ( listener ) ;
public class TimeSeriesUtils {
    /**
     * Reverse an input time series along the time dimension.
     *
     * @param in Input activations to reverse, with shape [minibatch, size, timeSeriesLength]
     * @return Reversed activations with the same shape, or null for null input
     */
    public static INDArray reverseTimeSeries(INDArray in) {
        if (in == null) {
            return null;
        }
        // pullRows below requires a contiguous 'f'-ordered buffer; copy if needed.
        if (in.ordering() != 'f' || in.isView() || !Shape.strideDescendingCAscendingF(in)) {
            in = in.dup('f');
        }
        // FIXME: int cast - time dimension length is truncated from long to int.
        // Build the reversed index sequence [T-1, T-2, ..., 0].
        int[] idxs = new int[(int) in.size(2)];
        int j = 0;
        for (int i = idxs.length - 1; i >= 0; i--) {
            idxs[j++] = i;
        }
        // Collapse [mb, size, T] to 2d so rows can be gathered per time step,
        // then restore the original 3d shape.
        INDArray inReshape = in.reshape('f', in.size(0) * in.size(1), in.size(2));
        INDArray outReshape = Nd4j.pullRows(inReshape, 0, idxs, 'f');
        return outReshape.reshape('f', in.size(0), in.size(1), in.size(2));
    }
}
public class I18nServiceToSale { /** * < p > Setter for hasName . < / p > * @ param pHasName reference */ @ Override public final void setHasName ( final ServiceToSale pHasName ) { } }
this . hasName = pHasName ; if ( this . itsId == null ) { this . itsId = new IdI18nServiceToSale ( ) ; } this . itsId . setHasName ( this . hasName ) ;
public class WaybackRequest { /** * create WaybackRequest for Capture - Query request . * @ param url target URL * @ param replay highlight date * @ param start start timestamp ( 14 - digit ) * @ param end end timestamp ( 14 - digit ) * @ return WaybackRequest */ public static WaybackRequest createCaptureQueryRequet ( String url , String replay , String start , String end ) { } }
WaybackRequest r = new WaybackRequest ( ) ; r . setCaptureQueryRequest ( ) ; r . setRequestUrl ( url ) ; r . setReplayTimestamp ( replay ) ; r . setStartTimestamp ( start ) ; r . setEndTimestamp ( end ) ; return r ;
public class ConcurrentIdentityHashMap { /** * Removes all of the mappings from this map . */ @ Override public void clear ( ) { } }
for ( int i = 0 ; i < this . segments . length ; ++ i ) { this . segments [ i ] . clear ( ) ; }
public class Config { /** * Read config object stored in YAML format from < code > InputStream < / code > * @ param inputStream object * @ return config * @ throws IOException error */ public static Config fromYAML ( InputStream inputStream ) throws IOException { } }
ConfigSupport support = new ConfigSupport ( ) ; return support . fromYAML ( inputStream , Config . class ) ;
public class StandardFieldsDialog {
    /**
     * Validates that the dialogue is tabbed and the given tab index is valid.
     *
     * @param tabIndex the index of the tab to validate.
     * @throws IllegalArgumentException if the dialogue was not initialised with
     *         tabs or if no tab exists with the given index.
     * @see #validateNotTabbed()
     */
    private void validateTabbed(int tabIndex) {
        // Reject use of tab-based methods on a non-tabbed dialog.
        if (!isTabbed()) {
            throw new IllegalArgumentException("Not initialised as a tabbed dialog - must use method without tab parameters");
        }
        // Bounds check against the known tab panels.
        if (tabIndex < 0 || tabIndex >= this.tabPanels.size()) {
            throw new IllegalArgumentException("Invalid tab index: " + tabIndex);
        }
    }
}
public class JsonStreamWriter {
    /**
     * Decrease the indent level, emitting a newline plus the shortened indent
     * when pretty-printing, and popping the first-name/value-pair state.
     *
     * @throws IOException if writing to the underlying writer fails
     */
    private void decreaseIndent() throws IOException {
        if (m_pretty) {
            m_writer.write('\n');
            // Drop one INDENT unit from the cached indent string.
            m_indent = m_indent.substring(0, m_indent.length() - INDENT.length());
            m_writer.write(m_indent);
        }
        // Leaving a nesting level: restore the parent's first-pair flag.
        m_firstNameValuePair.pop();
    }
}
public class KNXNetworkLinkIP { /** * { @ inheritDoc } When communicating with a KNX network which uses open medium , messages are broadcasted within * domain ( as opposite to system broadcast ) by default . Specify < code > dst = null < / code > for system broadcast . */ @ Override public void sendRequest ( final KNXAddress dst , final Priority p , final byte [ ] nsdu ) throws KNXLinkClosedException , KNXTimeoutException { } }
final int mc = mode == TUNNELING ? CEMILData . MC_LDATA_REQ : CEMILData . MC_LDATA_IND ; send ( mc , dst , p , nsdu , false ) ;
public class KieServerHttpRequest { /** * query parameter methods - - - - - */ public KieServerHttpRequest query ( final Object name , final Object value ) throws KieServerHttpRequestException { } }
getRequestInfo ( ) . setQueryParameter ( name . toString ( ) , value != null ? value . toString ( ) : null ) ; return this ;
public class XCalOutputProperties { /** * Sets the XML version to use ( defaults to " 1.0 " ) . * Note : Many JDKs only support 1.0 natively . For XML 1.1 support , add a * JAXP library like < a href = * " http : / / search . maven . org / # search % 7Cgav % 7C1%7Cg % 3A % 22xalan % 22%20AND % 20a % 3A % 22xalan % 22" * > xalan < / a > to your project . * @ param version the XML version or null to remove */ public void setXmlVersion ( String version ) { } }
if ( version == null ) { remove ( OutputKeys . VERSION ) ; return ; } put ( OutputKeys . VERSION , version ) ;
public class BatchedJmsTemplate { /** * Receive a batch of up to batchSize . Other than batching this method is the same as { @ link JmsTemplate # receive ( ) } * @ return A list of { @ link Message } * @ param batchSize The batch size * @ throws JmsException The { @ link JmsException } */ public List < Message > receiveBatch ( int batchSize ) throws JmsException { } }
Destination defaultDestination = getDefaultDestination ( ) ; if ( defaultDestination != null ) { return receiveBatch ( defaultDestination , batchSize ) ; } else { return receiveBatch ( getRequiredDefaultDestinationName ( ) , batchSize ) ; }
public class Datamodel {
    /**
     * Creates an {@link LexemeIdValue}.
     *
     * @param id a string of the form Ln... where n... is the string
     *        representation of a positive integer number
     * @param siteIri IRI to identify the site, usually the first part of the
     *        entity IRI of the site this belongs to, e.g.,
     *        "http://www.wikidata.org/entity/"
     * @return an {@link LexemeIdValue} corresponding to the input
     */
    public static LexemeIdValue makeLexemeIdValue(String id, String siteIri) {
        // Delegate to the shared datamodel factory.
        return factory.getLexemeIdValue(id, siteIri);
    }
}
public class ChatLinearLayoutManager { /** * Among the children that are suitable to be considered as an anchor child , returns the one * closest to the end of the layout . * Due to ambiguous adapter updates or children being removed , some children ' s positions may be * invalid . This method is a best effort to find a position within adapter bounds if possible . * It also prioritizes children that are within the visible bounds . * @ return A View that can be used an an anchor View . */ private View findReferenceChildClosestToEnd ( RecyclerView . State state ) { } }
return mShouldReverseLayout ? findFirstReferenceChild ( state . getItemCount ( ) ) : findLastReferenceChild ( state . getItemCount ( ) ) ;
public class MatrixVectorWriter {
    /**
     * Prints the vector size header: total size and number of entries.
     * Assumes coordinate format.
     */
    public void printVectorSize(VectorSize size) {
        // Fixed-width columns; Locale.ENGLISH keeps digit formatting stable.
        format(Locale.ENGLISH, "%10d %19d%n", size.size(), size.numEntries());
    }
}
public class DefaultDatastoreWriter { /** * Deletes the entities having the given keys . * @ param keys * the entities ' keys * @ throws EntityManagerException * if any error occurs while deleting . */ public void deleteByKey ( List < DatastoreKey > keys ) { } }
try { Key [ ] nativeKeys = new Key [ keys . size ( ) ] ; for ( int i = 0 ; i < keys . size ( ) ; i ++ ) { nativeKeys [ i ] = keys . get ( i ) . nativeKey ( ) ; } nativeWriter . delete ( nativeKeys ) ; } catch ( DatastoreException exp ) { throw DatastoreUtils . wrap ( exp ) ; }
public class InvocationMessage {
    /**
     * {@inheritDoc}
     * Dispatches to the first registered message whose arity matches the number
     * of supplied arguments; fails if no such message exists.
     */
    @Override
    public Object send(String... args) throws Exception {
        // Linear scan: arity decides which underlying message handles the call.
        for (Message message : messages) {
            if (message.getArity() == args.length) {
                return message.send(args);
            }
        }
        throw new IllegalArgumentException(String.format("No such method with %d arguments", args.length));
    }
}
public class StyleUtils { /** * Create new marker options populated with the feature row style ( icon or style ) * @ param featureStyleExtension feature style extension * @ param featureRow feature row * @ param density display density : { @ link android . util . DisplayMetrics # density } * @ return marker options populated with the feature style */ public static MarkerOptions createMarkerOptions ( FeatureStyleExtension featureStyleExtension , FeatureRow featureRow , float density ) { } }
MarkerOptions markerOptions = new MarkerOptions ( ) ; setFeatureStyle ( markerOptions , featureStyleExtension , featureRow , density ) ; return markerOptions ;
public class TtlRunnable {
    /**
     * Wrap input {@link Runnable} Collection to {@link TtlRunnable} Collection.
     *
     * @param tasks tasks to be wrapped. If input is {@code null}, an empty list
     *        is returned (not {@code null} - the return is {@code @Nonnull}).
     * @param releaseTtlValueReferenceAfterRun release TTL value reference after run,
     *        avoid memory leak even if {@link TtlRunnable} is referred.
     * @param idempotent is idempotent mode or not. If {@code true}, just return
     *        input {@link Runnable} when it's {@link TtlRunnable},
     *        otherwise throw {@link IllegalStateException}.
     *        <B><I>Caution</I></B>: {@code true} will cover up bugs! <b>DO NOT</b> set, only when you know why.
     * @return wrapped tasks
     * @throws IllegalStateException when input is {@link TtlRunnable} already and not idempotent.
     */
    @Nonnull
    public static List<TtlRunnable> gets(@Nullable Collection<? extends Runnable> tasks, boolean releaseTtlValueReferenceAfterRun, boolean idempotent) {
        // Null input yields an (immutable) empty list, honoring @Nonnull.
        if (null == tasks) return Collections.emptyList();
        List<TtlRunnable> copy = new ArrayList<TtlRunnable>();
        for (Runnable task : tasks) {
            // get() applies the same release/idempotent semantics per task.
            copy.add(TtlRunnable.get(task, releaseTtlValueReferenceAfterRun, idempotent));
        }
        return copy;
    }
}
public class StorageDownloadResponse {
    /**
     * Decodes the response content by wrapping the raw stream in a
     * length-bounded FastDFS input stream and handing it to the callback.
     *
     * @param in the raw response stream
     * @param charset response charset (unused here; the callback decides decoding)
     * @return the value produced by the callback
     * @throws IOException if reading the stream fails
     */
    @Override
    public T decodeContent(InputStream in, Charset charset) throws IOException {
        // Bound the stream to the declared content length before delegating.
        FdfsInputStream input = new FdfsInputStream(in, getContentLength());
        return callback.recv(input);
    }
}
public class CoverageDataCore { /** * Get the X encoded location from the base provided x * @ param x * x location * @ param encodingType * pixel encoding type * @ return encoded x location */ private float getXEncodedLocation ( float x , GriddedCoverageEncodingType encodingType ) { } }
float xLocation = x ; switch ( encodingType ) { case CENTER : case AREA : xLocation += 0.5f ; break ; case CORNER : break ; default : throw new GeoPackageException ( "Unsupported Encoding Type: " + encodingType ) ; } return xLocation ;
public class MPrinter { /** * Renvoie la boîte de dialogue swing de choix du fichier d ' export . ( Initialisée pour s ' ouvrir sur le répertoire courant user . dir ) . * @ return JFileChooser */ public static synchronized JFileChooser getFileChooser ( ) { } }
// NOPMD if ( fileChooser == null ) { final String currentDirectory = System . getProperty ( "user.dir" ) ; fileChooser = new JFileChooser ( currentDirectory ) ; } return fileChooser ;
public class Problem {

    /**
     * Reads a problem definition from the given stream.
     * Thin convenience wrapper; see {@link Train#readProblem(InputStream, double)}
     * for the parsing rules.
     *
     * @param inputStream stream containing the problem data
     * @param bias        bias term to apply
     * @return the parsed problem
     * @throws IOException               on read failure
     * @throws InvalidInputDataException when the stream content is malformed
     */
    public static Problem readFromStream(InputStream inputStream, double bias)
            throws IOException, InvalidInputDataException {
        // All parsing is delegated; this method adds no behavior of its own.
        return Train.readProblem(inputStream, bias);
    }
}
public class SimpleBlas {

    /**
     * Compute x &lt;- alpha * x (scale a matrix in place).
     *
     * @param alpha scalar factor applied to every element
     * @param x     matrix to scale; its backing data array is modified in place
     * @return the same matrix instance {@code x}, after scaling
     */
    public static FloatMatrix scal(float alpha, FloatMatrix x) {
        // Delegate to BLAS sscal over the full backing array (offset 0, stride 1).
        NativeBlas.sscal(x.length, alpha, x.data, 0, 1);
        return x;
    }
}
public class ST_Split { /** * Split a geometry a according a geometry b . Supported operations are : * split a line by a line a line by a point a polygon by a line . * A default tolerance of 10E - 6 is used to snap the cutter point . * @ param geomA * @ param geomB * @ return * @ throws SQLException */ public static Geometry split ( Geometry geomA , Geometry geomB ) throws SQLException { } }
if ( geomA == null || geomB == null ) { return null ; } if ( geomA instanceof Polygon ) { return splitPolygonWithLine ( ( Polygon ) geomA , ( LineString ) geomB ) ; } else if ( geomA instanceof MultiPolygon ) { return splitMultiPolygonWithLine ( ( MultiPolygon ) geomA , ( LineString ) geomB ) ; } else if ( geomA instanceof LineString ) { if ( geomB instanceof LineString ) { return splitLineStringWithLine ( ( LineString ) geomA , ( LineString ) geomB ) ; } else if ( geomB instanceof Point ) { return splitLineWithPoint ( ( LineString ) geomA , ( Point ) geomB , PRECISION ) ; } } else if ( geomA instanceof MultiLineString ) { if ( geomB instanceof LineString ) { return splitMultiLineStringWithLine ( ( MultiLineString ) geomA , ( LineString ) geomB ) ; } else if ( geomB instanceof Point ) { return splitMultiLineStringWithPoint ( ( MultiLineString ) geomA , ( Point ) geomB , PRECISION ) ; } } throw new SQLException ( "Split a " + geomA . getGeometryType ( ) + " by a " + geomB . getGeometryType ( ) + " is not supported." ) ;
public class Protocol { /** * Convenience method which reads the input of an { @ link java . io . ObjectInput } with the help of * internally called { @ link Protocol # receive ( Packet ) < code > receive ( package ) < / code > } . * @ param objectInput the object input to read the packaged user - data from * @ throws IOException if there was an error reading from the < code > ObjectInput < / code > * @ throws ClassNotFoundException if the object being read from the < code > ObjectInput < / code > is not of the correct type * @ see Protocol # send ( Object ) < code > receive ( package ) < / code > */ public synchronized NavigableMap < Short , T > receive ( ObjectInput objectInput ) throws IOException , ClassNotFoundException { } }
Packet < T > packet = Packet . < T > readExternalStatic ( objectInput ) ; return receive ( packet ) ;
public class MarcRecord { /** * Return a list of MARC fields of this record where key pattern matches were found . * @ param pattern the pattern * @ return a list of MARC fields */ public List < MarcField > filterKey ( Pattern pattern ) { } }
return marcFields . stream ( ) . map ( field -> field . matchKey ( pattern ) ) . filter ( Objects :: nonNull ) . collect ( Collectors . toList ( ) ) ;
public class BlockPlacementPolicyRaid { /** * Get path for the parity file . Returns null if it does not exists * @ param codec the codec of the parity file . * @ return the toUri path of the parity file */ private NameWithINode getParityFile ( Codec codec , String src ) throws IOException { } }
String parity ; if ( codec . isDirRaid ) { String parent = getParentPath ( src ) ; parity = codec . parityDirectory + parent ; } else { parity = codec . parityDirectory + src ; } byte [ ] [ ] components = INodeDirectory . getPathComponents ( parity ) ; INode parityInode = namesystem . dir . getINode ( components ) ; if ( parityInode == null ) return null ; return new NameWithINode ( parity , parityInode ) ;
public class Alignments { /** * Factory method to run a list of scorers concurrently . This method runs the scorers in parallel by submitting * all of the scoring tasks to the shared thread pool of the { @ link ConcurrencyTools } utility . * @ param < S > each { @ link Sequence } of an alignment pair is of type S * @ param < C > each element of an { @ link AlignedSequence } is a { @ link Compound } of type C * @ param scorers list of scorers to run * @ return list of score results from running scorers */ public static < S extends Sequence < C > , C extends Compound > double [ ] runPairwiseScorers ( List < PairwiseSequenceScorer < S , C > > scorers ) { } }
int n = 1 , all = scorers . size ( ) ; List < Future < Double > > futures = new ArrayList < Future < Double > > ( ) ; for ( PairwiseSequenceScorer < S , C > scorer : scorers ) { futures . add ( ConcurrencyTools . submit ( new CallablePairwiseSequenceScorer < S , C > ( scorer ) , String . format ( "Scoring pair %d of %d" , n ++ , all ) ) ) ; } List < Double > results = getListFromFutures ( futures ) ; double [ ] scores = new double [ results . size ( ) ] ; for ( int i = 0 ; i < scores . length ; i ++ ) { scores [ i ] = results . get ( i ) ; } return scores ;
public class GerritTriggeredEvent { /** * Gerrit server - based time stamp when the event was created by Gerrit Server . * ONLY USE FOR UNIT TESTS ! * @ param eventCreatedOn the eventCreatedOn to set */ public void setEventCreatedOn ( String eventCreatedOn ) { } }
Long milliseconds = TimeUnit . SECONDS . toMillis ( Long . parseLong ( eventCreatedOn ) ) ; this . eventCreatedOn = new Date ( milliseconds ) ;
public class Plot {

    /**
     * Generates the Gnuplot script for this plot and writes it to
     * {@code basepath + ".gnuplot"}.
     *
     * @param basepath  The base path to use; the script goes to basepath.gnuplot
     *                  and the rendered image to basepath.png.
     * @param datafiles The names of the data files that need to be plotted,
     *                  in the order in which they ought to be plotted. It is assumed
     *                  that the ith file corresponds to the ith entry in
     *                  {@code datapoints}. Can be {@code null} if there's no data to plot.
     * @throws IOException if the script file cannot be written
     */
    private void writeGnuplotScript(final String basepath, final String[] datafiles) throws IOException {
        final String script_path = basepath + ".gnuplot";
        final PrintWriter gp = new PrintWriter(script_path);
        try {
            // XXX don't hardcode all those settings. At least not like that.
            gp.append("set term png small size ")
              // PrintWriter has no append overloads for numbers, hence the toString calls.
              .append(Short.toString(width))
              .append(",")
              .append(Short.toString(height));
            // Pull rendering controls out of params so the generic "set <key> <value>"
            // loop below does not emit them a second time.
            final String smooth = params.remove("smooth");
            final String fgcolor = params.remove("fgcolor");
            final String style = params.remove("style");
            String bgcolor = params.remove("bgcolor");
            if (fgcolor != null && bgcolor == null) {
                // We can't specify a fgcolor without specifying a bgcolor.
                bgcolor = "xFFFFFF"; // So use a default.
            }
            if (bgcolor != null) {
                if (fgcolor != null && "transparent".equals(bgcolor)) {
                    // In case we need to specify a fgcolor but we wanted a transparent
                    // background, we also need to pass a bgcolor otherwise the first
                    // hex color will be mistakenly taken as a bgcolor by Gnuplot.
                    bgcolor = "transparent xFFFFFF";
                }
                gp.append(' ').append(bgcolor);
            }
            if (fgcolor != null) {
                gp.append(' ').append(fgcolor);
            }
            // Global axis/time configuration; timestamps are epoch seconds ("%s").
            gp.append("\n"
                + "set xdata time\n"
                + "set timefmt \"%s\"\n"
                + "if (GPVAL_VERSION < 4.6) set xtics rotate; else set xtics rotate right\n"
                + "set output \"").append(basepath + ".png")
              .append("\"\n" + "set xrange [\"")
              .append(String.valueOf((start_time & UNSIGNED) + utc_offset))
              .append("\":\"")
              .append(String.valueOf((end_time & UNSIGNED) + utc_offset))
              .append("\"]\n");
            if (!params.containsKey("format x")) {
                gp.append("set format x \"").append(xFormat()).append("\"\n");
            }
            final int nseries = datapoints.size();
            if (nseries > 0) {
                gp.write("set grid\n" + "set style data ");
                gp.append(style != null ? style : "linespoint").append("\n");
                if (!params.containsKey("key")) {
                    gp.write("set key right box\n");
                }
            } else {
                // Nothing to plot: drop the legend and show a placeholder label.
                gp.write("unset key\n");
                if (params == null || !params.containsKey("label")) {
                    gp.write("set label \"No data\" at graph 0.5,0.9 center\n");
                }
            }
            // Emit any remaining user-provided Gnuplot parameters verbatim.
            if (params != null) {
                for (final Map.Entry<String, String> entry : params.entrySet()) {
                    final String key = entry.getKey();
                    final String value = entry.getValue();
                    if (value != null) {
                        gp.append("set ").append(key).append(' ').append(value).write('\n');
                    } else {
                        gp.append("unset ").append(key).write('\n');
                    }
                }
            }
            for (final String opts : options) {
                if (opts.contains("x1y2")) {
                    // Create a second scale for the y-axis on the right-hand side.
                    gp.write("set y2tics border\n");
                    break;
                }
            }
            // compile annotations to determine if we have any to graph
            final List<Annotation> notes = new ArrayList<Annotation>();
            for (int i = 0; i < nseries; i++) {
                final DataPoints dp = datapoints.get(i);
                final List<Annotation> series_notes = dp.getAnnotations();
                if (series_notes != null && !series_notes.isEmpty()) {
                    notes.addAll(series_notes);
                }
            }
            if (globals != null) {
                notes.addAll(globals);
            }
            if (notes.size() > 0) {
                Collections.sort(notes);
                // Each annotation becomes a vertical arrow plus a boxed text label.
                for (Annotation note : notes) {
                    String ts = Long.toString(note.getStartTime());
                    String value = new String(note.getDescription());
                    gp.append("set arrow from \"").append(ts).append("\", graph 0 to \"");
                    gp.append(ts).append("\", graph 1 nohead ls 3\n");
                    gp.append("set object rectangle at \"").append(ts);
                    gp.append("\", graph 0 size char (strlen(\"").append(value);
                    gp.append("\")), char 1 front fc rgbcolor \"white\"\n");
                    gp.append("set label \"").append(value).append("\" at \"");
                    gp.append(ts).append("\", graph 0 front center\n");
                }
            }
            // Finally the plot command itself, one clause per series.
            gp.write("plot ");
            for (int i = 0; i < nseries; i++) {
                final DataPoints dp = datapoints.get(i);
                final String title = dp.metricName() + dp.getTags();
                gp.append(" \"").append(datafiles[i]).append("\" using 1:2");
                if (smooth != null) {
                    gp.append(" smooth ").append(smooth);
                }
                // TODO(tsuna): Escape double quotes in title.
                gp.append(" title \"").append(title).write('"');
                final String opts = options.get(i);
                if (!opts.isEmpty()) {
                    gp.append(' ').write(opts);
                }
                if (i != nseries - 1) {
                    gp.print(", \\");
                }
                gp.write('\n');
            }
            if (nseries == 0) {
                // Gnuplot requires at least one plot expression; "0" is a no-op plot.
                gp.write('0');
            }
        } finally {
            gp.close();
            LOG.info("Wrote Gnuplot script to " + script_path);
        }
    }
}
public class ChronoHistory { /** * / * [ deutsch ] * < p > Ist das angegebene historische Datum g & uuml ; ltig ? < / p > * < p > Wenn das Argument { @ code null } ist , liefert die Methode { @ code false } . < / p > * @ param date historic calendar date to be checked , maybe { @ code null } * @ return { @ code false } if given date is invalid else { @ code true } * @ since 3.0 */ public boolean isValid ( HistoricDate date ) { } }
if ( ( date == null ) || this . isOutOfRange ( date ) ) { return false ; } Calculus algorithm = this . getAlgorithm ( date ) ; return ( ( algorithm != null ) && algorithm . isValid ( date ) ) ;
public class JavaUserDefinedTypedAggregation {

    /**
     * Entry point for the typed custom aggregation example: reads employees from
     * JSON, applies the user-defined {@code MyAverage} aggregator as a typed
     * column, and prints the average salary.
     */
    public static void main(String[] args) {
        SparkSession spark = SparkSession
            .builder()
            .appName("Java Spark SQL user-defined Datasets aggregation example")
            .getOrCreate();
        // $example on:typed_custom_aggregation$
        Encoder<Employee> employeeEncoder = Encoders.bean(Employee.class);
        String path = "examples/src/main/resources/employees.json";
        Dataset<Employee> ds = spark.read().json(path).as(employeeEncoder);
        ds.show();
        // Expected rows:
        // | name    | salary |
        // | Michael | 3000   |
        // | Andy    | 4500   |
        // | Justin  | 3500   |
        // | Berta   | 4000   |
        MyAverage myAverage = new MyAverage();
        // Convert the function to a `TypedColumn` and give it a name
        TypedColumn<Employee, Double> averageSalary = myAverage.toColumn().name("average_salary");
        Dataset<Double> result = ds.select(averageSalary);
        result.show();
        // Expected output:
        // | average_salary |
        // | 3750.0         |
        // $example off:typed_custom_aggregation$
        spark.stop();
    }
}
public class SequenceGeneratorImpl { /** * Returns the < code > initial - value < / code > attribute * @ return the value defined for the attribute < code > initial - value < / code > */ public Integer getInitialValue ( ) { } }
if ( childNode . getAttribute ( "initial-value" ) != null && ! childNode . getAttribute ( "initial-value" ) . equals ( "null" ) ) { return Integer . valueOf ( childNode . getAttribute ( "initial-value" ) ) ; } return null ;
public class Factor {

    /**
     * Explicitly sets the factor levels. Typically levels are computed from the
     * data, but in rare cases the user may want to specify them, e.g. when the
     * data column does not contain all the desired levels.
     *
     * @param levels    the explicit levels to use
     * @param isOrdered a flag indicating whether the levels have "less than" and
     *                  "greater than" left-to-right order meaning
     * @throws DDFException propagated from the full overload
     */
    public void setLevels(List<String> levels, boolean isOrdered) throws DDFException {
        // Delegate to the full overload with no explicit level codes (null).
        this.setLevels(levels, null, isOrdered);
    }
}
public class RestTemplate { /** * Execute the given method on the provided URI . * < p > The { @ link ClientHttpRequest } is processed using the { @ link RequestCallback } ; * the response with the { @ link ResponseExtractor } . * @ param url the fully - expanded URL to connect to * @ param method the HTTP method to execute ( GET , POST , etc . ) * @ param requestCallback object that prepares the request ( can be { @ code null } ) * @ param responseExtractor object that extracts the return value from the response ( can be { @ code null } ) * @ return an arbitrary object , as returned by the { @ link ResponseExtractor } */ protected < T > T doExecute ( URI url , HttpMethod method , RequestCallback requestCallback , ResponseExtractor < T > responseExtractor ) throws RestClientException { } }
Assert . notNull ( url , "'url' must not be null" ) ; Assert . notNull ( method , "'method' must not be null" ) ; ClientHttpResponse response = null ; try { ClientHttpRequest request = createRequest ( url , method ) ; if ( requestCallback != null ) { requestCallback . doWithRequest ( request ) ; } response = request . execute ( ) ; if ( ! getErrorHandler ( ) . hasError ( response ) ) { logResponseStatus ( method , url , response ) ; } else { handleResponseError ( method , url , response ) ; } if ( responseExtractor != null ) { return responseExtractor . extractData ( response ) ; } else { return null ; } } catch ( IOException ex ) { throw new ResourceAccessException ( "I/O error on " + method . name ( ) + " request for \"" + url + "\": " + ex . getMessage ( ) , ex ) ; } finally { if ( response != null ) { response . close ( ) ; } }
public class FileSystem { /** * Returns the number of blocks this file / directory consists of * assuming the file system ' s standard block size . * @ param file * the file * @ return the number of block ' s the file / directory consists of * @ throws IOException */ public int getNumberOfBlocks ( final FileStatus file ) throws IOException { } }
int numberOfBlocks = 0 ; if ( file == null ) { return 0 ; } // For a file , this is easy if ( ! file . isDir ( ) ) { return getNumberOfBlocks ( file . getLen ( ) , file . getBlockSize ( ) ) ; } // file is a directory final FileStatus [ ] files = this . listStatus ( file . getPath ( ) ) ; for ( int i = 0 ; i < files . length ; i ++ ) { if ( ! files [ i ] . isDir ( ) ) { numberOfBlocks += getNumberOfBlocks ( files [ i ] . getLen ( ) , files [ i ] . getBlockSize ( ) ) ; } } return numberOfBlocks ;
public class EventSubscription { /** * The list of Amazon Redshift event categories specified in the event notification subscription . * Values : Configuration , Management , Monitoring , Security * @ return The list of Amazon Redshift event categories specified in the event notification subscription . < / p > * Values : Configuration , Management , Monitoring , Security */ public java . util . List < String > getEventCategoriesList ( ) { } }
if ( eventCategoriesList == null ) { eventCategoriesList = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return eventCategoriesList ;
public class HandlerArrayFromFixed { /** * this method returns a field description array given a set of positions ( assuming there are no * pads in the intended string ) * @ param offset The relative offset of the beginning of the line compared to the * offsets in ' positions ' . I find it easier to use my text editor to * identify positions , and it counts the first column as ' 1 ' , so I can * set ' first ' = 1 , and then use the column number of the rest of the * fields as reported by my text editor . * @ param positions An array of numbers indicating positions of fields in the data lines , * starting at the first field . * @ return a two dimensional array of start and end positions for tokens */ public static int [ ] [ ] fieldsFromPositions ( int offset , int ... positions ) { } }
int fieldCount = positions . length - 1 ; int [ ] [ ] fields = new int [ fieldCount ] [ 2 ] ; int last = positions [ 0 ] - offset ; for ( int i = 0 ; i < fieldCount ; ++ i ) { fields [ i ] [ 0 ] = last ; last = positions [ i + 1 ] - offset ; fields [ i ] [ 1 ] = last ; } return fields ;
public class JCRStatisticsManager { /** * Add one line of data to the csv file related to the given context . */ private static void printData ( StatisticsContext context ) { } }
if ( context . writer == null ) { return ; } boolean first = true ; if ( context . global != null ) { context . global . printData ( context . writer ) ; first = false ; } for ( Statistics s : context . allStatistics . values ( ) ) { if ( first ) { first = false ; } else { context . writer . print ( ',' ) ; } s . printData ( context . writer ) ; } context . writer . println ( ) ; context . writer . flush ( ) ;
public class CliFrontend { /** * Displays an exception message . * @ param t The exception to display . * @ return The return code for the process . */ private static int handleError ( Throwable t ) { } }
LOG . error ( "Error while running the command." , t ) ; System . err . println ( ) ; System . err . println ( "------------------------------------------------------------" ) ; System . err . println ( " The program finished with the following exception:" ) ; System . err . println ( ) ; if ( t . getCause ( ) instanceof InvalidProgramException ) { System . err . println ( t . getCause ( ) . getMessage ( ) ) ; StackTraceElement [ ] trace = t . getCause ( ) . getStackTrace ( ) ; for ( StackTraceElement ele : trace ) { System . err . println ( "\t" + ele ) ; if ( ele . getMethodName ( ) . equals ( "main" ) ) { break ; } } } else { t . printStackTrace ( ) ; } return 1 ;
public class Database {

    /**
     * Marks a server as available and (re)connects it when it reports a
     * master or slave role, then refreshes the server list.
     *
     * @param id   Neo4j cluster id of the server
     * @param role role reported by the cluster ("master" or "slave"; other
     *             values still mark the server available but skip connect)
     */
    public synchronized void onServerAvailable(final String id, final String role) {
        logger.debug("[onServerAvailable] id = {}, role = {}", id, role);
        Server server = getServerById(id);
        boolean isMaster = role.equals("master");
        boolean isSlave = role.equals("slave");
        // Unknown server id: nothing to update.
        if (server == null) {
            return;
        }
        // Already available with an unchanged role: nothing to do.
        if (server.isAvailable()) {
            if (server.isMaster() && isMaster) return;
            if (!server.isMaster() && isSlave) return;
        }
        if (isMaster || isSlave) {
            server.setAvailable(true);
            try {
                // Best-effort reconnect; failure is logged but does not abort.
                server.connect();
            } catch (Exception e) {
                logger.error("[onServerAvailable]", e);
            }
        }
        // NOTE(review): this second setAvailable(true) is redundant for
        // master/slave roles and marks servers with *unknown* roles available
        // without connecting them — confirm whether that is intentional.
        server.setAvailable(true);
        if (isMaster) {
            setWriteServer(server);
        }
        refreshServers();
    }
}
public class GobblinClusterManager { /** * Build the { @ link GobblinHelixJobScheduler } for the Application Master . */ private GobblinHelixJobScheduler buildGobblinHelixJobScheduler ( Config config , Path appWorkDir , List < ? extends Tag < ? > > metadataTags , SchedulerService schedulerService ) throws Exception { } }
Properties properties = ConfigUtils . configToProperties ( config ) ; return new GobblinHelixJobScheduler ( properties , this . multiManager . getJobClusterHelixManager ( ) , this . multiManager . getTaskDriverHelixManager ( ) , this . eventBus , appWorkDir , metadataTags , schedulerService , this . jobCatalog ) ;
public class WidgetUtil {

    /**
     * Creates the HTML to display a Flash movie for the browser on which we're
     * running, by wrapping a {@code FlashObject} in the generic container.
     *
     * @param ident     identifier for the Flash object
     * @param movie     movie resource to embed
     * @param width     display width in pixels
     * @param height    display height in pixels
     * @param flashVars a pre-URLEncoded string containing flash variables, or null.
     *                  http://www.adobe.com/cfusion/knowledgebase/index.cfm?id=tn_16417
     * @return an {@code HTML} widget containing the embedded movie
     */
    public static HTML createFlashContainer(String ident, String movie, int width, int height, String flashVars) {
        // Delegate container markup generation to the shared helper.
        return createContainer(new FlashObject(ident, movie, width, height, flashVars));
    }
}
public class ChatDirector {

    /**
     * Registers all the chat-command handlers against this director's message
     * bundle: help, clear, speak, emote, think, tell and broadcast.
     */
    protected void registerCommandHandlers() {
        // All handlers share the director's translation bundle.
        MessageBundle msg = _ctx.getMessageManager().getBundle(_bundle);
        registerCommandHandler(msg, "help", new HelpHandler());
        registerCommandHandler(msg, "clear", new ClearHandler());
        registerCommandHandler(msg, "speak", new SpeakHandler());
        registerCommandHandler(msg, "emote", new EmoteHandler());
        registerCommandHandler(msg, "think", new ThinkHandler());
        registerCommandHandler(msg, "tell", new TellHandler());
        registerCommandHandler(msg, "broadcast", new BroadcastHandler());
    }
}
public class ManagementViewClientCriterion {

    /**
     * Configures the elements of the composite drag-and-drop accept criterion
     * for the Management View: which components each drag source may be
     * dropped onto, and the concrete drop-area ids.
     *
     * @return accept criterion elements (target table, target tag,
     *         distribution table, distribution tag — in that order)
     */
    static ServerViewComponentClientCriterion[] createViewComponentClientCriteria() {
        final ServerViewComponentClientCriterion[] criteria = new ServerViewComponentClientCriterion[4];
        // Target table acceptable components.
        criteria[0] = ServerViewComponentClientCriterion.createBuilder()
                .dragSourceIdPrefix(UIComponentIdProvider.TARGET_TABLE_ID)
                .dropTargetIdPrefixes(SPUIDefinitions.TARGET_TAG_ID_PREFIXS, UIComponentIdProvider.DIST_TABLE_ID)
                .dropAreaIds(UIComponentIdProvider.TARGET_TAG_DROP_AREA_ID, UIComponentIdProvider.DIST_TABLE_ID)
                .build();
        // Target Tag acceptable components.
        criteria[1] = ServerViewComponentClientCriterion.createBuilder()
                .dragSourceIdPrefix(SPUIDefinitions.TARGET_TAG_ID_PREFIXS)
                .dropTargetIdPrefixes(UIComponentIdProvider.TARGET_TABLE_ID, UIComponentIdProvider.DIST_TABLE_ID)
                .dropAreaIds(UIComponentIdProvider.TARGET_TABLE_ID, UIComponentIdProvider.DIST_TABLE_ID)
                .build();
        // Distribution table acceptable components.
        criteria[2] = ServerViewComponentClientCriterion.createBuilder()
                .dragSourceIdPrefix(UIComponentIdProvider.DIST_TABLE_ID)
                .dropTargetIdPrefixes(UIComponentIdProvider.TARGET_TABLE_ID, UIComponentIdProvider.TARGET_DROP_FILTER_ICON, SPUIDefinitions.DISTRIBUTION_TAG_ID_PREFIXS)
                .dropAreaIds(UIComponentIdProvider.TARGET_TABLE_ID, UIComponentIdProvider.TARGET_DROP_FILTER_ICON, UIComponentIdProvider.DISTRIBUTION_TAG_TABLE_ID)
                .build();
        // Distribution tag acceptable components.
        criteria[3] = ServerViewComponentClientCriterion.createBuilder()
                .dragSourceIdPrefix(SPUIDefinitions.DISTRIBUTION_TAG_ID_PREFIXS)
                .dropTargetIdPrefixes(UIComponentIdProvider.DIST_TABLE_ID)
                .dropAreaIds(UIComponentIdProvider.DIST_TABLE_ID)
                .build();
        return criteria;
    }
}
public class MungeCsv {

    /**
     * CSV reader and munging test program: instantiates the generated munger
     * class, streams the input CSV through it row by row, and writes the munged
     * rows to the output CSV. Exits with 0 on success, 1 on any row failure.
     *
     * @param args command-line args, parsed by {@code parseArgs} into the
     *             static assemblyClassName / inputCSVFileName / outputCSVFileName fields
     * @throws Exception on setup failures (class loading, file opening)
     */
    public static void main(String[] args) throws Exception {
        parseArgs(args);
        GenMunger rawMunger;
        // The munger implementation is loaded by name from the generated model class.
        rawMunger = (hex.genmodel.GenMunger) Class.forName(assemblyClassName).newInstance();
        BufferedReader input = new BufferedReader(new FileReader(inputCSVFileName));
        BufferedWriter output = new BufferedWriter(new FileWriter(outputCSVFileName));
        // Emit outputCSV column names.
        String[] rawHeader = rawMunger.outNames();
        StringBuilder header = new StringBuilder();
        for (int i = 0; i < rawHeader.length; ++i) {
            header.append("\"").append(rawHeader[i]).append("\"");
            if (i < rawHeader.length - 1) header.append(",");
        }
        output.write(header.toString());
        output.write("\n");
        // Loop over inputCSV one row at a time.
        int lineNum = 0;
        String line;
        try {
            while ((line = input.readLine()) != null) {
                lineNum++;
                // skip the header.
                if (lineNum == 1) continue;
                // Parse the CSV line. Somewhat handles quoted commas. But this ain't no parser test!
                RowData row;
                try {
                    row = parseDataRow(line, rawMunger);
                } catch (NumberFormatException nfe) {
                    nfe.printStackTrace();
                    System.out.println("Failed to parse row: " + lineNum);
                    throw new RuntimeException();
                }
                RowData mungedRow = rawMunger.fit(row);
                // Doubles are written bare; every other value is quoted.
                for (int i = 0; i < rawMunger.outNames().length; ++i) {
                    Object val = mungedRow == null ? Double.NaN : mungedRow.get(rawMunger.outNames()[i]);
                    if (val instanceof Double) output.write(String.valueOf(val));
                    else output.write("\"" + val + "\"");
                    if (i < rawMunger.outNames().length - 1) output.write(",");
                }
                output.write("\n");
            }
        } catch (Exception e) {
            // Any failure aborts the whole run with exit code 1.
            System.out.println("Caught exception on line " + lineNum);
            System.out.println("");
            e.printStackTrace();
            System.exit(1);
        } finally {
            // Clean up.
            output.close();
            input.close();
        }
        // Predictions were successfully generated. Calling program can now compare them with something.
        System.exit(0);
    }
}
public class BifurcatedConsumerSessionProxy {

    /**
     * Unlocks a set of locked messages held by the message processor. The call
     * is passed to the server, which invokes the method on the real bifurcated
     * consumer session residing there.
     *
     * @param msgHandles an array of message handles that denote the messages to be unlocked
     * @throws com.ibm.wsspi.sib.core.exception.SISessionUnavailableException
     * @throws com.ibm.wsspi.sib.core.exception.SISessionDroppedException
     * @throws com.ibm.wsspi.sib.core.exception.SIConnectionUnavailableException
     * @throws com.ibm.wsspi.sib.core.exception.SIConnectionDroppedException
     * @throws com.ibm.websphere.sib.exception.SIResourceException
     * @throws com.ibm.wsspi.sib.core.exception.SIConnectionLostException
     * @throws com.ibm.websphere.sib.exception.SIIncorrectCallException
     * @throws com.ibm.wsspi.sib.core.exception.SIMessageNotLockedException
     * @throws com.ibm.websphere.sib.exception.SIErrorException
     */
    public void unlockSet(SIMessageHandle[] msgHandles)
            throws SISessionUnavailableException, SISessionDroppedException,
            SIConnectionUnavailableException, SIConnectionDroppedException,
            SIResourceException, SIConnectionLostException, SIIncorrectCallException,
            SIMessageNotLockedException, SIErrorException {
        // Standard entry/exit tracing; the guard is re-evaluated on exit on purpose.
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "unlockSet", new Object[] { msgHandles.length + " msg ids" });
        unlockSet(msgHandles, true); // True is the default as it matches with old behaviour.
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "unlockSet");
    }
}
public class Property {

    /**
     * Create a {@link Property.PositionFactory} for the property specified by
     * {@code mutator}, whose original source form is expected to have been a
     * method reference for a valid Java bean mutator method, in which case it is
     * required that the calling class be processed with the
     * {@code therian-property-method-weaver} for Apache Commons Weaver, which
     * wraps the reference in a {@link Mutator} object. Alternatively the
     * {@link Mutator} can be explicitly instantiated.
     *
     * @param mutator bean mutator reference (must be a {@link Mutator} at runtime)
     * @return {@link PositionFactory}
     * @throws IllegalArgumentException if {@code mutator} is not a {@link Mutator}
     *         instance at runtime
     */
    public static <P, T> PositionFactory<P, T> at(BiConsumer<? super P, ? super T> mutator) {
        // Weaving wraps the original method reference in a Mutator; without it
        // the property name cannot be recovered.
        Validate.isInstanceOf(Mutator.class, mutator,
            "Cannot detect property from %s; missing %s?", mutator, THERIAN_PROPERTY_METHOD_WEAVER);
        return new PositionFactory<>(((Mutator<? super P, ? super T>) mutator).propertyName);
    }
}
public class ConfigStoreUtils {

    /**
     * Gets topics from the config store. Topics are either whitelisted or
     * blacklisted via a config-store tag; each tag-matched topic is then
     * checked for the topic.whitelist / topic.blacklist boolean in its config.
     * If neither tag property is provided, all topics are returned.
     *
     * @param properties          job properties carrying the tag/filter settings
     * @param configStoreUri      base URI of the config store
     * @param kafkaConsumerClient client used to enumerate all Kafka topics
     * @return filtered list of Kafka topics
     */
    public static List<KafkaTopic> getTopicsFromConfigStore(Properties properties, String configStoreUri,
            GobblinKafkaConsumerClient kafkaConsumerClient) {
        ConfigClient configClient = ConfigClient.createConfigClient(VersionStabilityPolicy.WEAK_LOCAL_STABILITY);
        // Start from the complete topic list: whitelist everything, blacklist nothing.
        State state = new State();
        state.setProp(KafkaSource.TOPIC_WHITELIST, ".*");
        state.setProp(KafkaSource.TOPIC_BLACKLIST, StringUtils.EMPTY);
        List<KafkaTopic> allTopics = kafkaConsumerClient.getFilteredTopics(
            DatasetFilterUtils.getPatternList(state, KafkaSource.TOPIC_BLACKLIST),
            DatasetFilterUtils.getPatternList(state, KafkaSource.TOPIC_WHITELIST));
        Optional<Config> runtimeConfig = ConfigClientUtils.getOptionalRuntimeConfig(properties);
        if (properties.containsKey(GOBBLIN_CONFIG_TAGS_WHITELIST)) {
            // Whitelist mode: keep only topics tagged under the whitelist URI
            // whose config enables topic.whitelist.
            Preconditions.checkArgument(properties.containsKey(GOBBLIN_CONFIG_FILTER),
                "Missing required property " + GOBBLIN_CONFIG_FILTER);
            String filterString = properties.getProperty(GOBBLIN_CONFIG_FILTER);
            Path whiteListTagUri = PathUtils.mergePaths(new Path(configStoreUri),
                new Path(properties.getProperty(GOBBLIN_CONFIG_TAGS_WHITELIST)));
            List<String> whitelistedTopics = new ArrayList<>();
            ConfigStoreUtils.getTopicsURIFromConfigStore(configClient, whiteListTagUri, filterString, runtimeConfig)
                .stream()
                .filter((URI u) -> ConfigUtils.getBoolean(
                    ConfigStoreUtils.getConfig(configClient, u, runtimeConfig), KafkaSource.TOPIC_WHITELIST, false))
                .forEach(((URI u) -> whitelistedTopics.add(ConfigStoreUtils.getTopicNameFromURI(u))));
            return allTopics.stream()
                .filter((KafkaTopic p) -> whitelistedTopics.contains(p.getName()))
                .collect(Collectors.toList());
        } else if (properties.containsKey(GOBBLIN_CONFIG_TAGS_BLACKLIST)) {
            // Blacklist mode: drop topics tagged under the blacklist URI
            // whose config enables topic.blacklist.
            Preconditions.checkArgument(properties.containsKey(GOBBLIN_CONFIG_FILTER),
                "Missing required property " + GOBBLIN_CONFIG_FILTER);
            String filterString = properties.getProperty(GOBBLIN_CONFIG_FILTER);
            Path blackListTagUri = PathUtils.mergePaths(new Path(configStoreUri),
                new Path(properties.getProperty(GOBBLIN_CONFIG_TAGS_BLACKLIST)));
            List<String> blacklistedTopics = new ArrayList<>();
            ConfigStoreUtils.getTopicsURIFromConfigStore(configClient, blackListTagUri, filterString, runtimeConfig)
                .stream()
                .filter((URI u) -> ConfigUtils.getBoolean(
                    ConfigStoreUtils.getConfig(configClient, u, runtimeConfig), KafkaSource.TOPIC_BLACKLIST, false))
                .forEach(((URI u) -> blacklistedTopics.add(ConfigStoreUtils.getTopicNameFromURI(u))));
            return allTopics.stream()
                .filter((KafkaTopic p) -> !blacklistedTopics.contains(p.getName()))
                .collect(Collectors.toList());
        } else {
            // No tags configured: no filtering beyond the full topic list.
            log.warn("None of the blacklist or whitelist tags are provided");
            return allTopics;
        }
    }
}
public class ObjectiveMessageResources { /** * Does the application bundle already have extends handling ? < br > * It returns true if the bundle has a parent instance of { @ link MessageResourceBundleObjectiveWrapper } . * @ param appBundle The bundle for application for determination . ( NotNull ) * @ return The determination , true or false . */ protected boolean isAlreadyExtends ( MessageResourceBundle appBundle ) { } }
MessageResourceBundle currentBundle = appBundle ; boolean found = false ; while ( true ) { MessageResourceBundle parentBundle = currentBundle . getParent ( ) ; if ( parentBundle == null ) { break ; } if ( parentBundle instanceof MessageResourceBundleObjectiveWrapper ) { found = true ; break ; } currentBundle = parentBundle ; } return found ;
public class TeaServletAdmin {

    /**
     * Returns information about all functions available to the templates.
     * <p>
     * Collects the methods of {@code HttpContext} (excluding {@code Object}'s own
     * methods and the {@code print}/{@code toString} helpers) plus the context
     * functions of every registered application, sorted.
     *
     * @return sorted array of function descriptors, or {@code null} if
     *         introspection failed (historical behavior preserved)
     */
    public FunctionInfo[] getFunctions() {
        // TODO: make this a little more useful by showing more function details.
        ApplicationInfo[] appInfos = getApplications();
        FunctionInfo[] funcArray = null;
        try {
            MethodDescriptor[] methods =
                Introspector.getBeanInfo(HttpContext.class).getMethodDescriptors();
            // ArrayList instead of the legacy synchronized Vector; the list is purely local.
            List<FunctionInfo> funcList = new java.util.ArrayList<FunctionInfo>(50);
            for (MethodDescriptor m : methods) {
                // Skip Object's own methods and the context's print/toString helpers.
                if (m.getMethod().getDeclaringClass() != Object.class
                        && !m.getMethod().getName().equals("print")
                        && !m.getMethod().getName().equals("toString")) {
                    funcList.add(new FunctionInfo(m, null));
                }
            }
            // Append each application's context functions.
            for (ApplicationInfo appInfo : appInfos) {
                funcList.addAll(Arrays.asList(appInfo.getContextFunctions()));
            }
            funcArray = funcList.toArray(new FunctionInfo[funcList.size()]);
            Arrays.sort(funcArray);
        } catch (Exception ie) {
            // Preserve historical behavior: log the failure and fall through, returning null.
            ie.printStackTrace();
        }
        return funcArray;
    }
}
public class MenuBuilder { /** * Remove the item at the given index and optionally forces menu views to update . * @ param index The index of the item to be removed . If this index is * invalid an exception is thrown . * @ param updateChildrenOnMenuViews Whether to force update on menu views . Please make sure you * eventually call this after your batch of removals . */ private void removeItemAtInt ( int index , boolean updateChildrenOnMenuViews ) { } }
if ( ( index < 0 ) || ( index >= mItems . size ( ) ) ) { return ; } mItems . remove ( index ) ; if ( updateChildrenOnMenuViews ) { onItemsChanged ( true ) ; }
public class AbstractColorPickerPreference { /** * Sets the border width of the preview of the preference ' s color . * @ param borderWidth * The border width , which should be set , as an { @ link Integer } value in pixels . The * border width must be at least 0 */ public final void setPreviewBorderWidth ( final int borderWidth ) { } }
Condition . INSTANCE . ensureAtLeast ( borderWidth , 0 , "The border width must be at least 0" ) ; this . previewBorderWidth = borderWidth ; if ( previewLoader != null ) { previewLoader . setBorderWidth ( borderWidth ) ; } adaptPreviewView ( ) ;
public class GeometricDoubleBondEncoderFactory {

    /**
     * Utility method for shifting a specified value in an index to the back
     * (see {@link #permutation(int[])}).
     * <p>
     * Compacts all entries different from {@code v} to the front of the array
     * (in place) and writes {@code v} into the last slot.
     *
     * @param neighbors list of neighbors
     * @param v the value to shift to the back
     * @return <i>neighbors</i> array (the same, mutated, instance)
     */
    static int[] moveToBack(int[] neighbors, int v) {
        int write = 0;
        for (int read = 0; read < neighbors.length; read++) {
            int current = neighbors[read];
            // Keep everything that is not the target value, packed at the front.
            if (current != v) {
                neighbors[write] = current;
                write++;
            }
        }
        // The target value always ends up in the final position.
        neighbors[neighbors.length - 1] = v;
        return neighbors;
    }
}
public class PhotosApi { /** * Returns a list of your photos with no tags . * < br > * This method requires authentication with ' read ' permission . * @ param minUploadDate Optional . Minimum upload date . Photos with an upload date greater than or equal to this value will be returned . * @ param maxUploadDate Optional . Maximum upload date . Photos with an upload date less than or equal to this value will be returned . * @ param minTakenDate Optional . Minimum taken date . Photos with an taken date greater than or equal to this value will be returned . * @ param maxTakenDate Optional . Maximum taken date . Photos with an taken date less than or equal to this value will be returned . * @ param privacyFilter Optional . Return photos only matching a certain privacy level . * @ param mediaType Optional . Filter results by media type . * @ param extras Optional . Extra information to fetch for each returned record . * @ param perPage Optional . Number of photos to return per page . If this argument is zero , it defaults to 100 . The maximum allowed value is 500. * @ param page Optional . The page of results to return . If this argument is zero , it defaults to 1. * @ return photos object . * @ throws JinxException if there are any errors . * @ see < a href = " https : / / www . flickr . com / services / api / flickr . photos . getUntagged . html " > flickr . photos . getUntagged < / a > */ public Photos getUntagged ( Date minUploadDate , Date maxUploadDate , Date minTakenDate , Date maxTakenDate , JinxConstants . PrivacyFilter privacyFilter , JinxConstants . MediaType mediaType , EnumSet < JinxConstants . PhotoExtras > extras , int perPage , int page ) throws JinxException { } }
Map < String , String > params = new TreeMap < > ( ) ; params . put ( "method" , "flickr.photos.getUntagged" ) ; if ( minUploadDate != null ) { params . put ( "min_upload_date" , JinxUtils . formatDateAsUnixTimestamp ( minUploadDate ) ) ; } if ( maxUploadDate != null ) { params . put ( "max_upload_date" , JinxUtils . formatDateAsUnixTimestamp ( maxUploadDate ) ) ; } if ( minTakenDate != null ) { params . put ( "min_taken_date" , JinxUtils . formatDateAsUnixTimestamp ( minTakenDate ) ) ; } if ( maxTakenDate != null ) { params . put ( "max_taken_date" , JinxUtils . formatDateAsUnixTimestamp ( maxTakenDate ) ) ; } if ( privacyFilter != null ) { params . put ( "privacy_filter" , Integer . toString ( JinxUtils . privacyFilterToFlickrPrivacyFilterId ( privacyFilter ) ) ) ; } if ( mediaType != null ) { params . put ( "media" , mediaType . toString ( ) ) ; } if ( ! JinxUtils . isNullOrEmpty ( extras ) ) { params . put ( "extras" , JinxUtils . buildCommaDelimitedList ( extras ) ) ; } if ( perPage > 0 ) { params . put ( "per_page" , Integer . toString ( perPage ) ) ; } if ( page > 0 ) { params . put ( "page" , Integer . toString ( page ) ) ; } return jinx . flickrGet ( params , Photos . class ) ;
public class ResourceFinder { /** * Executes { @ link # findString ( String ) } assuming the contents URL found is the name of * a class that should be loaded and returned . * @ param uri * @ return * @ throws IOException * @ throws ClassNotFoundException */ public Class findClass ( String uri ) throws IOException , ClassNotFoundException { } }
String className = findString ( uri ) ; return classLoader . loadClass ( className ) ;
public class VerificationConditionGenerator { /** * Flatten an assumption set upto a given ancestor . That is , do not include the * ancestor or any of its ancestors in the results . This is a little like taking * the difference of the given assumptions and the given ancestor ' s assumptions . * @ param assumptions * The assumption set to be flattened * @ param ancestor * An ancestor of the given assumption set , or null to indicate all * ancestors should be included * @ return */ private WyalFile . Stmt flattenUpto ( AssumptionSet assumptions , AssumptionSet ancestor ) { } }
if ( assumptions == ancestor ) { // We have reached the ancestor return null ; } else { // Flattern parent assumptions AssumptionSet [ ] parents = assumptions . parents ; WyalFile . Stmt e = null ; switch ( parents . length ) { case 0 : // do nothing break ; case 1 : // easy e = flattenUpto ( parents [ 0 ] , ancestor ) ; break ; default : // harder AssumptionSet lca = assumptions . commonAncestor ; WyalFile . Stmt factor = flattenUpto ( lca , ancestor ) ; for ( int i = 0 ; i != parents . length ; ++ i ) { e = or ( e , flattenUpto ( parents [ i ] , lca ) ) ; } e = and ( factor , e ) ; } // Combine with local assumptions ( if applicable ) WyalFile . Stmt [ ] local = assumptions . assumptions ; for ( int i = 0 ; i != local . length ; ++ i ) { e = and ( e , local [ i ] ) ; } return e ; }
public class EntryStream { /** * Returns a sequential { @ code EntryStream } containing a single key - value * pair * @ param < K > the type of key * @ param < V > the type of value * @ param key the key of the single element * @ param value the value of the single element * @ return a singleton sequential stream */ public static < K , V > EntryStream < K , V > of ( K key , V value ) { } }
return of ( Stream . of ( new SimpleImmutableEntry < > ( key , value ) ) ) ;
public class TypeExtractor { /** * Recursively determine all declared fields * This is required because class . getFields ( ) is not returning fields defined * in parent classes . * @ param clazz class to be analyzed * @ param ignoreDuplicates if true , in case of duplicate field names only the lowest one * in a hierarchy will be returned ; throws an exception otherwise * @ return list of fields */ @ PublicEvolving public static List < Field > getAllDeclaredFields ( Class < ? > clazz , boolean ignoreDuplicates ) { } }
List < Field > result = new ArrayList < Field > ( ) ; while ( clazz != null ) { Field [ ] fields = clazz . getDeclaredFields ( ) ; for ( Field field : fields ) { if ( Modifier . isTransient ( field . getModifiers ( ) ) || Modifier . isStatic ( field . getModifiers ( ) ) ) { continue ; // we have no use for transient or static fields } if ( hasFieldWithSameName ( field . getName ( ) , result ) ) { if ( ignoreDuplicates ) { continue ; } else { throw new InvalidTypesException ( "The field " + field + " is already contained in the hierarchy of the " + clazz + "." + "Please use unique field names through your classes hierarchy" ) ; } } result . add ( field ) ; } clazz = clazz . getSuperclass ( ) ; } return result ;
public class TrmMeConnectRequestImpl { /** * Get the requesting ME UUID from the message . * Javadoc description supplied by TrmMeConnectRequest interface . */ public SIBUuid8 getRequestingMeUuid ( ) { } }
byte [ ] b = ( byte [ ] ) jmo . getField ( TrmFirstContactAccess . BODY_MECONNECTREQUEST_REQUESTINGMEUUID ) ; if ( b != null ) return new SIBUuid8 ( b ) ; return null ;
public class MarketplaceAgreementsInner { /** * Get marketplace terms . * @ param publisherId Publisher identifier string of image being deployed . * @ param offerId Offer identifier string of image being deployed . * @ param planId Plan identifier string of image being deployed . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < AgreementTermsInner > getAsync ( String publisherId , String offerId , String planId , final ServiceCallback < AgreementTermsInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( getWithServiceResponseAsync ( publisherId , offerId , planId ) , serviceCallback ) ;
public class Directory { /** * Initializes the directory with content from the initial ldif file . Note that a partition has to be created for * the root of the ldif file . * @ throws IOException * if the resource pointing to the ldif file to be imported can not be accessed */ private void importInitialLdif ( ) throws IOException { } }
if ( this . initialLdif != null ) { try ( InputStream ldifStream = this . initialLdif . openStream ( ) ) { this . importLdif ( ldifStream ) ; } }
public class WebServiceConnector {

    /**
     * {@inheritDoc}
     * <p>
     * Builds the connector schema for the ACCOUNT object class from the attribute
     * definitions reported by the remote provisioning web service. Also rebuilds the
     * {@code wsAttributes} name-to-definition map as a side effect. Operations the
     * service does not support (authentication, sync) are removed from the schema.
     *
     * @return the built {@link Schema}; also cached in the {@code schema} field
     */
    @Override
    public Schema schema() {
        LOG.ok("Schema retrieving");
        final Provisioning provisioning = connection.getProvisioning();
        if (provisioning == null) {
            // Without a provisioning client there is nothing to introspect.
            throw new IllegalStateException("Web Service client not found");
        }
        // Reset the attribute cache before repopulating it from the remote schema.
        if (wsAttributes != null) {
            wsAttributes.clear();
        }
        wsAttributes = new HashMap<String, WSAttribute>();
        final Set<AttributeInfo> attributes = new HashSet<AttributeInfo>();
        final List<WSAttribute> wsAttrs = provisioning.schema();
        for (WSAttribute attribute : wsAttrs) {
            wsAttributes.put(getAttributeName(attribute), attribute);
            if (LOG.isOk()) {
                LOG.ok("\nAttribute: "
                        + "\n\tName: " + attribute.getName()
                        + "\n\tType: " + attribute.getType()
                        + "\n\tIsKey: " + attribute.isKey()
                        + "\n\tIsPassword: " + attribute.isPassword()
                        + "\n\tIsNullable: " + attribute.isNullable());
            }
            try {
                attributes.add(buildAttribute(attribute));
            } catch (IllegalArgumentException ila) {
                // A single malformed attribute is logged and skipped, not fatal.
                LOG.error("Invalid attribute " + attribute.getName(), ila);
            }
        }
        // Define the ACCOUNT object class with all successfully built attributes.
        final SchemaBuilder schemaBld = new SchemaBuilder(getClass());
        final ObjectClassInfoBuilder objectclassInfoBuilder = new ObjectClassInfoBuilder();
        objectclassInfoBuilder.setType(ObjectClass.ACCOUNT_NAME);
        objectclassInfoBuilder.addAllAttributeInfo(attributes);
        final ObjectClassInfo objectclassInfo = objectclassInfoBuilder.build();
        schemaBld.defineObjectClass(objectclassInfo);
        /*
         * Note: AuthenticateOp, and all the 'SPIOperation'-s are by default added by Reflection API to the Schema.
         * See for details: FrameworkUtil.getDefaultSupportedOperations() ReflectionUtil.getAllInterfaces(connector); is
         * the line that *does* acquire the implemented interfaces by the connector class.
         */
        if (!provisioning.isAuthenticationSupported()) {
            LOG.ok("Authentication is not supported.");
            schemaBld.removeSupportedObjectClass(AuthenticateOp.class, objectclassInfo);
        }
        if (!provisioning.isSyncSupported()) {
            LOG.ok("Synchronization is not supported.");
            schemaBld.removeSupportedObjectClass(SyncOp.class, objectclassInfo);
        }
        // Cache and return the built schema.
        schema = schemaBld.build();
        return schema;
    }
}
public class CachingAndArtifactsManager { /** * Ensures that the process definition is cached in the appropriate places , including the * deployment ' s collection of deployed artifacts and the deployment manager ' s cache , as well * as caching any ProcessDefinitionInfos . */ public void updateCachingAndArtifacts ( ParsedDeployment parsedDeployment ) { } }
CommandContext commandContext = Context . getCommandContext ( ) ; final ProcessEngineConfigurationImpl processEngineConfiguration = Context . getProcessEngineConfiguration ( ) ; DeploymentCache < ProcessDefinitionCacheEntry > processDefinitionCache = processEngineConfiguration . getDeploymentManager ( ) . getProcessDefinitionCache ( ) ; DeploymentEntity deployment = parsedDeployment . getDeployment ( ) ; for ( ProcessDefinitionEntity processDefinition : parsedDeployment . getAllProcessDefinitions ( ) ) { BpmnModel bpmnModel = parsedDeployment . getBpmnModelForProcessDefinition ( processDefinition ) ; Process process = parsedDeployment . getProcessModelForProcessDefinition ( processDefinition ) ; ProcessDefinitionCacheEntry cacheEntry = new ProcessDefinitionCacheEntry ( processDefinition , bpmnModel , process ) ; processDefinitionCache . add ( processDefinition . getId ( ) , cacheEntry ) ; addDefinitionInfoToCache ( processDefinition , processEngineConfiguration , commandContext ) ; // Add to deployment for further usage deployment . addDeployedArtifact ( processDefinition ) ; }
public class DBEntitySequenceFactory { /** * Fetches the scalar values of the specified entity . * Also fetches the scalar values of other entities to be returned by the iterators of the same category * Uses { @ link # multiget _ slice ( List , ColumnParent , SlicePredicate ) } method with the ' slice list ' parameter to perform bulk fetch * @ param tableDef entity type * @ param caller next entity to be returned by the iterator ( must be initialized first ) * @ param scalarFields list of the fields to be fetched * @ param options defines now many entities should be initialized */ void initializeScalarFields ( DBEntity caller , List < String > scalarFields , DBEntitySequenceOptions options ) { } }
TableDefinition tableDef = caller . getTableDef ( ) ; String category = toEntityCategory ( tableDef . getTableName ( ) , scalarFields ) ; LRUCache < ObjectID , Map < String , String > > cache = getScalarCache ( category ) ; Set < ObjectID > idSet = new HashSet < ObjectID > ( ) ; List < DBEntity > entities = collectUninitializedEntities ( caller , cache , idSet , options . adjustEntityBuffer ( cache ) ) ; if ( idSet . size ( ) == 0 ) { // all requested scalar values have been found in the cache , no fetching is required return ; } Map < ObjectID , Map < String , String > > fetchResult = fetchScalarFields ( tableDef , idSet , scalarFields , category ) ; for ( Map . Entry < ObjectID , Map < String , String > > entry : fetchResult . entrySet ( ) ) { cache . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } // Initialize the entities with the cached scalar values for ( DBEntity entity : entities ) { ObjectID key = entity . id ( ) ; Map < String , String > values = cache . get ( key ) ; if ( values == null ) { values = new HashMap < String , String > ( ) ; } entity . initialize ( values ) ; }
public class JNDIEntry { /** * Unregisters a service if one was registered * @ param context */ protected synchronized void deactivate ( ComponentContext context ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Unregistering JNDIEntry " + serviceRegistration ) ; } if ( this . serviceRegistration != null ) { this . serviceRegistration . unregister ( ) ; }
public class JvmSpecializedTypeReferenceImpl {

    /**
     * EMF "basic" setter for the {@code equivalent} containment: swaps the field and
     * chains a SET notification without performing inverse-reference bookkeeping.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetEquivalent(JvmTypeReference newEquivalent, NotificationChain msgs) {
        // Remember the old value so the notification can carry both states.
        JvmTypeReference oldEquivalent = equivalent;
        equivalent = newEquivalent;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET,
                TypesPackage.JVM_SPECIALIZED_TYPE_REFERENCE__EQUIVALENT, oldEquivalent, newEquivalent);
            // Start a new chain or append to the caller-supplied one.
            if (msgs == null)
                msgs = notification;
            else
                msgs.add(notification);
        }
        return msgs;
    }
}
public class HttpRequester { /** * Build name value pare list list . * @ param keyValueMap the key value map * @ return the list */ public static List < NameValuePair > buildNameValuePareList ( Map < String , String > keyValueMap ) { } }
return keyValueMap . entrySet ( ) . stream ( ) . map ( entry -> new BasicNameValuePair ( entry . getKey ( ) , entry . getValue ( ) ) ) . collect ( toList ( ) ) ;
public class CustomVisionTrainingManager { /** * Initializes an instance of Custom Vision Training API client . * @ param restClient the REST client to connect to Azure . * @ param apiKey the Custom Vision Training API key * @ return the Custom Vision Training API client */ public static TrainingApi authenticate ( RestClient restClient , final String apiKey ) { } }
return new TrainingApiImpl ( restClient ) . withApiKey ( apiKey ) ;
public class HttpRestartServer { /** * Handle a server request . * @ param request the request * @ param response the response * @ throws IOException in case of I / O errors */ public void handle ( ServerHttpRequest request , ServerHttpResponse response ) throws IOException { } }
try { Assert . state ( request . getHeaders ( ) . getContentLength ( ) > 0 , "No content" ) ; ObjectInputStream objectInputStream = new ObjectInputStream ( request . getBody ( ) ) ; ClassLoaderFiles files = ( ClassLoaderFiles ) objectInputStream . readObject ( ) ; objectInputStream . close ( ) ; this . server . updateAndRestart ( files ) ; response . setStatusCode ( HttpStatus . OK ) ; } catch ( Exception ex ) { logger . warn ( "Unable to handler restart server HTTP request" , ex ) ; response . setStatusCode ( HttpStatus . INTERNAL_SERVER_ERROR ) ; }