signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class PackageManagerUtils { /** * Checks if the device has a gyroscope sensor . * @ param context the context . * @ return { @ code true } if the device has a gyroscope sensor . */ @ TargetApi ( Build . VERSION_CODES . GINGERBREAD ) public static boolean hasGyroscopeSensorFeature ( Context context ) { } }
return hasGyroscopeSensorFeature ( context . getPackageManager ( ) ) ;
public class TimeUtils { /** * Format time period pair to Duration String * @ param startTime start time * @ param endTime end time * @ return Duration String */ public static String formatDuration ( final long startTime , final long endTime ) { } }
if ( startTime == - 1 ) { return "-" ; } final long durationMS ; if ( endTime == - 1 ) { durationMS = System . currentTimeMillis ( ) - startTime ; } else { durationMS = endTime - startTime ; } long seconds = durationMS / 1000 ; if ( seconds < 60 ) { return seconds + " sec" ; } long minutes = seconds / 60 ; seconds %= 60 ; if ( minutes < 60 ) { return minutes + "m " + seconds + "s" ; } long hours = minutes / 60 ; minutes %= 60 ; if ( hours < 24 ) { return hours + "h " + minutes + "m " + seconds + "s" ; } final long days = hours / 24 ; hours %= 24 ; return days + "d " + hours + "h " + minutes + "m" ;
public class UcsApi { /** * Search for interactions based on elastic search query , using ucs 9 * @ param esSearchInteractionData ( required ) * @ return ApiSuccessResponse * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiSuccessResponse elastiscSearchInteractions ( EsSearchInteractionData esSearchInteractionData ) throws ApiException { } }
ApiResponse < ApiSuccessResponse > resp = elastiscSearchInteractionsWithHttpInfo ( esSearchInteractionData ) ; return resp . getData ( ) ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link BigInteger } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link BigInteger } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "rowIndex" ) public JAXBElement < BigInteger > createRowIndex ( BigInteger value ) { } }
return new JAXBElement < BigInteger > ( _RowIndex_QNAME , BigInteger . class , null , value ) ;
public class ConfigUtil { /** * Optionally get a configuration value . */ public static < T > Optional < T > get ( Config config , Function < String , T > getter , String path ) { } }
if ( ! config . hasPath ( path ) ) { return Optional . empty ( ) ; } return Optional . of ( getter . apply ( path ) ) ;
public class WebcamDiscoveryService { /** * Scan for newly added or already removed webcams . */ public void scan ( ) { } }
WebcamDiscoveryListener [ ] listeners = Webcam . getDiscoveryListeners ( ) ; List < WebcamDevice > tmpnew = driver . getDevices ( ) ; List < WebcamDevice > tmpold = null ; try { tmpold = getDevices ( getWebcams ( Long . MAX_VALUE , TimeUnit . MILLISECONDS ) ) ; } catch ( TimeoutException e ) { throw new WebcamException ( e ) ; } // convert to linked list due to O ( 1 ) on remove operation on // iterator versus O ( n ) for the same operation in array list List < WebcamDevice > oldones = new LinkedList < WebcamDevice > ( tmpold ) ; List < WebcamDevice > newones = new LinkedList < WebcamDevice > ( tmpnew ) ; Iterator < WebcamDevice > oi = oldones . iterator ( ) ; Iterator < WebcamDevice > ni = null ; WebcamDevice od = null ; // old device WebcamDevice nd = null ; // new device // reduce lists while ( oi . hasNext ( ) ) { od = oi . next ( ) ; ni = newones . iterator ( ) ; while ( ni . hasNext ( ) ) { nd = ni . next ( ) ; // remove both elements , if device name is the same , which // actually means that device is exactly the same if ( nd . getName ( ) . equals ( od . getName ( ) ) ) { ni . remove ( ) ; oi . remove ( ) ; break ; } } } // if any left in old ones it means that devices has been removed if ( oldones . size ( ) > 0 ) { List < Webcam > notified = new ArrayList < Webcam > ( ) ; for ( WebcamDevice device : oldones ) { for ( Webcam webcam : webcams ) { if ( webcam . getDevice ( ) . getName ( ) . equals ( device . getName ( ) ) ) { notified . add ( webcam ) ; break ; } } } setCurrentWebcams ( tmpnew ) ; for ( Webcam webcam : notified ) { notifyWebcamGone ( webcam , listeners ) ; webcam . dispose ( ) ; } } // if any left in new ones it means that devices has been added if ( newones . size ( ) > 0 ) { setCurrentWebcams ( tmpnew ) ; for ( WebcamDevice device : newones ) { for ( Webcam webcam : webcams ) { if ( webcam . getDevice ( ) . getName ( ) . equals ( device . getName ( ) ) ) { notifyWebcamFound ( webcam , listeners ) ; break ; } } } }
public class FactorTable { /** * Computes the probabilities of the tag at the end of the table given that * the previous tag sequence in table is GIVEN . given is at the beginning , * position in question is at the end * @ return the probabilities of the tag at the end of the table */ public double [ ] conditionalLogProbsGivenPrevious ( int [ ] given ) { } }
if ( given . length != windowSize - 1 ) { throw new IllegalArgumentException ( "conditionalLogProbsGivenPrevious requires given one less than clique size (" + windowSize + ") but was " + Arrays . toString ( given ) ) ; } double [ ] result = new double [ numClasses ] ; for ( int i = 0 ; i < numClasses ; i ++ ) { int index = indexOf ( given , i ) ; result [ i ] = table [ index ] ; } ArrayMath . logNormalize ( result ) ; return result ;
public class ExcludeImpl { /** * If not already created , a new < code > if - system - property < / code > element will be created and returned . * Otherwise , the first existing < code > if - system - property < / code > element will be returned . * @ return the instance defined for the element < code > if - system - property < / code > */ public IfSystemProperty < Exclude < T > > getOrCreateIfSystemProperty ( ) { } }
List < Node > nodeList = childNode . get ( "if-system-property" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new IfSystemPropertyImpl < Exclude < T > > ( this , "if-system-property" , childNode , nodeList . get ( 0 ) ) ; } return createIfSystemProperty ( ) ;
public class JFapOutboundConnLink { /** * begin D181601 */ public void close ( VirtualConnection vc , Exception e ) { } }
if ( tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "close" , new Object [ ] { vc , e } ) ; if ( tc . isEventEnabled ( ) && ( e != null ) ) SibTr . exception ( this , tc , e ) ; super . close ( vc , e ) ; if ( tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "close" ) ;
public class DynamicURLClassLoader { /** * Appends the specified URL to the list of URLs to search for * classes and resources . * @ param url the URL to be added to the search path of URLs */ public void addURL ( final URL url ) { } }
AccessController . doPrivileged ( new PrivilegedAction < Object > ( ) { @ Override public Object run ( ) { DynamicURLClassLoader . this . ucp . addURL ( url ) ; return null ; } } , this . acc ) ;
public class ThreadNameInfo { /** * Format the thread name string . * < ul > * < li > { @ code % % } - emit a percent sign < / li > * < li > { @ code % t } - emit the per - factory thread sequence number < / li > * < li > { @ code % g } - emit the global thread sequence number < / li > * < li > { @ code % f } - emit the factory sequence number < / li > * < li > { @ code % p } - emit the { @ code " : " } - separated thread group path < / li > * < li > { @ code % i } - emit the thread ID < / li > * < li > { @ code % G } - emit the thread group name < / li > * < li > { @ code % n } - emit the node name < / li > * < li > { @ code % c } - emit the component name < / li > * < / ul > * @ param thread the thread * @ param formatString the format string * @ return the thread name string */ public String format ( Thread thread , String formatString ) { } }
final StringBuilder builder = new StringBuilder ( formatString . length ( ) * 5 ) ; final ThreadGroup group = thread . getThreadGroup ( ) ; final Matcher matcher = searchPattern . matcher ( formatString ) ; while ( matcher . find ( ) ) { if ( matcher . group ( 1 ) != null ) { builder . append ( matcher . group ( ) ) ; } else { switch ( matcher . group ( ) . charAt ( 1 ) ) { case '%' : builder . append ( '%' ) ; break ; case 't' : builder . append ( perFactoryThreadSequenceNum ) ; break ; case 'g' : builder . append ( globalThreadSequenceNum ) ; break ; case 'f' : builder . append ( factorySequenceNum ) ; break ; case 'p' : if ( group != null ) appendGroupPath ( group , builder ) ; break ; case 'i' : builder . append ( thread . getId ( ) ) ; break ; case 'G' : if ( group != null ) builder . append ( group . getName ( ) ) ; break ; case 'n' : if ( node != null ) builder . append ( node ) ; break ; case 'c' : if ( component != null ) builder . append ( component ) ; break ; } } } return builder . toString ( ) ;
public class WebUtilities { /** * Attempts to find the nearest component ( may be the component itself ) that is assignable to the given class . * @ param clazz the class to look for * @ param comp the component to start at . * @ return the component or matching ancestor , if found , otherwise null . * @ param < T > the class to find */ public static < T > T getClosestOfClass ( final Class < T > clazz , final WComponent comp ) { } }
if ( comp == null ) { return null ; } if ( clazz . isInstance ( comp ) ) { return ( T ) comp ; } return getAncestorOfClass ( clazz , comp ) ;
public class DRL6Expressions { /** * $ ANTLR start synpred33 _ DRL6Expressions */ public final void synpred33_DRL6Expressions_fragment ( ) throws RecognitionException { } }
// src / main / resources / org / drools / compiler / lang / DRL6Expressions . g : 591:15 : ( DOT ID ) // src / main / resources / org / drools / compiler / lang / DRL6Expressions . g : 591:16 : DOT ID { match ( input , DOT , FOLLOW_DOT_in_synpred33_DRL6Expressions3234 ) ; if ( state . failed ) return ; match ( input , ID , FOLLOW_ID_in_synpred33_DRL6Expressions3236 ) ; if ( state . failed ) return ; }
public class AdaptiveGrid { /** * Updates the grid limits and the grid content adding the solutions contained * in a specific < code > solutionList < / code > . * @ param solutionList The < code > solutionList < / code > . */ public void updateGrid ( List < S > solutionList ) { } }
// Update lower and upper limits updateLimits ( solutionList ) ; // Calculate the division size for ( int obj = 0 ; obj < numberOfObjectives ; obj ++ ) { divisionSize [ obj ] = gridUpperLimits [ obj ] - gridLowerLimits [ obj ] ; } // Clean the hypercubes for ( int i = 0 ; i < hypercubes . length ; i ++ ) { hypercubes [ i ] = 0 ; } // Add the population addSolutionSet ( solutionList ) ;
public class ImageStatistics { /** * Returns the sum of all the pixels in the image . * @ param img Input image . Not modified . */ public static int sum ( InterleavedU16 img ) { } }
if ( BoofConcurrency . USE_CONCURRENT ) { return ImplImageStatistics_MT . sum ( img ) ; } else { return ImplImageStatistics . sum ( img ) ; }
public class Traverson { /** * Follow multiple link - relation types , one by one . The { @ link LinkPredicates predicate } is used to select * the matching links , if there are more than one per link - relation type . * Templated links are expanded to URIs using the specified template variables . * Embedded items are used instead of resolving links , if present in the returned HAL documents . * @ param rels the link - relation types of the followed links * @ param predicate the predicate used to select the link to follow * @ param vars uri - template variables used to build links . * @ return this * @ since 1.0.0 */ public Traverson follow ( final List < String > rels , final Predicate < Link > predicate , final Map < String , Object > vars ) { } }
for ( String rel : rels ) { follow ( rel , predicate , vars ) ; } return this ;
public class StatsBuffer { /** * Reset our local state : All values are set to 0. */ public void reset ( ) { } }
statsComputed . set ( false ) ; pos = 0 ; curSize = 0 ; total = 0L ; mean = 0.0 ; variance = 0.0 ; stddev = 0.0 ; min = 0L ; max = 0L ; for ( int i = 0 ; i < percentileValues . length ; ++ i ) { percentileValues [ i ] = 0.0 ; }
public class TransactionImpl { /** * Remove colProxy from list of pending collections and * register its contents with the transaction . */ public void afterLoading ( CollectionProxyDefaultImpl colProxy ) { } }
if ( log . isDebugEnabled ( ) ) log . debug ( "loading a proxied collection a collection: " + colProxy ) ; Collection data = colProxy . getData ( ) ; for ( Iterator iterator = data . iterator ( ) ; iterator . hasNext ( ) ; ) { Object o = iterator . next ( ) ; if ( ! isOpen ( ) ) { log . error ( "Collection proxy materialization outside of a running tx, obj=" + o ) ; try { throw new Exception ( "Collection proxy materialization outside of a running tx, obj=" + o ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; } } else { Identity oid = getBroker ( ) . serviceIdentity ( ) . buildIdentity ( o ) ; ClassDescriptor cld = getBroker ( ) . getClassDescriptor ( ProxyHelper . getRealClass ( o ) ) ; RuntimeObject rt = new RuntimeObject ( o , oid , cld , false , ProxyHelper . isProxy ( o ) ) ; lockAndRegister ( rt , Transaction . READ , isImplicitLocking ( ) , getRegistrationList ( ) ) ; } } unregisterFromCollectionProxy ( colProxy ) ;
public class JavaBean { /** * Set property * @ param bean the bean * @ param propertyName the property name * @ param value the value to set */ public static void setProperty ( Object bean , String propertyName , Object value ) { } }
setProperty ( bean , propertyName , value , true ) ;
public class BigtableAsyncAdmin { /** * / * ( non - Javadoc ) * @ see org . apache . hadoop . hbase . client . AsyncAdmin # truncateTable ( org . apache . hadoop . hbase . TableName , boolean ) */ @ Override public CompletableFuture < Void > truncateTable ( TableName tableName , boolean preserveSplits ) { } }
if ( ! preserveSplits ) { LOG . info ( "truncate will preserveSplits. The passed in variable is ignored." ) ; } return toCompletableFuture ( bigtableTableAdminClient . dropAllRowsAsync ( tableName . getNameAsString ( ) ) ) ;
public class ContextId { /** * parses a ContextId object from a Map - representation used in * { @ link org . openengsb . core . api . persistence . ConfigPersistenceService } */ public static ContextId fromMetaData ( Map < String , String > metaData ) { } }
return new ContextId ( metaData . get ( META_KEY_ID ) ) ;
public class ISUPMessageFactoryImpl { /** * ( non - Javadoc ) * @ see org . restcomm . protocols . ss7 . isup . ISUPMessageFactory # createREL ( int cic ) */ public ReleaseMessage createREL ( int cic ) { } }
ReleaseMessage msg = createREL ( ) ; CircuitIdentificationCode code = this . parameterFactory . createCircuitIdentificationCode ( ) ; code . setCIC ( cic ) ; msg . setCircuitIdentificationCode ( code ) ; return msg ;
public class BindDictionary { /** * Base field methods */ public BaseField < T > addBaseField ( int viewResId ) { } }
BaseField < T > field = new BaseField < T > ( viewResId ) ; mBaseFields . add ( field ) ; return field ;
public class BaseStreamingClient { /** * @ param stores - the list of name of the stores to be streamed to * @ param checkpointCallback - the callback that allows for the user to * record the progress , up to the last event delivered . This callable * would be invoked every so often internally . * @ param recoveryCallback - the callback that allows the user to rewind the * upstream to the position recorded by the last complete call on * checkpointCallback whenever an exception occurs during the * streaming session . * @ param allowMerge - whether to allow for the streaming event to be merged * with online writes . If not , all online writes since the completion * of the last streaming session will be lost at the end of the * current streaming session . * @ param blackListedNodes - the list of Nodes not to stream to ; we can * probably recover them later from the replicas */ @ SuppressWarnings ( { } }
"unchecked" , "rawtypes" } ) public synchronized void initStreamingSessions ( List < String > stores , Callable checkpointCallback , Callable recoveryCallback , boolean allowMerge , List < Integer > blackListedNodes ) { logger . info ( "Initializing a streaming session" ) ; this . checkpointCallback = checkpointCallback ; this . recoveryCallback = recoveryCallback ; this . allowMerge = allowMerge ; streamingresults = Executors . newFixedThreadPool ( 3 ) ; entriesProcessed = 0 ; newBatch = true ; isMultiSession = true ; this . throttler = new EventThrottler ( THROTTLE_QPS ) ; TimeUnit unit = TimeUnit . SECONDS ; Collection < Node > nodesInCluster = adminClient . getAdminClientCluster ( ) . getNodes ( ) ; if ( blackListedNodes != null && blackListedNodes . size ( ) > 0 ) { this . blackListedNodes = blackListedNodes ; } for ( Node node : nodesInCluster ) { if ( blackListedNodes != null && blackListedNodes . size ( ) > 0 ) { if ( ! blackListedNodes . contains ( node . getId ( ) ) ) { nodesToStream . add ( node ) ; } } else nodesToStream . add ( node ) ; } // socket pool streamingSocketPool = new SocketPool ( adminClient . getAdminClientCluster ( ) . getNumberOfNodes ( ) * MAX_STORES_PER_SESSION , ( int ) unit . toMillis ( adminClientConfig . getAdminConnectionTimeoutSec ( ) ) , ( int ) unit . toMillis ( adminClientConfig . getAdminSocketTimeoutSec ( ) ) , adminClientConfig . getAdminSocketBufferSize ( ) , adminClientConfig . getAdminSocketKeepAlive ( ) ) ; nodeIdStoreToSocketRequest = new HashMap ( ) ; nodeIdStoreToOutputStreamRequest = new HashMap ( ) ; nodeIdStoreToInputStreamRequest = new HashMap ( ) ; nodeIdStoreInitialized = new HashMap ( ) ; storeToRoutingStrategy = new HashMap ( ) ; nodeIdStoreToSocketAndStreams = new HashMap ( ) ; for ( String store : stores ) { addStoreToSession ( store ) ; }
public class EncryptUtil { /** * 获取HmacSHA1签名 < / br > * @ param data * 数据 * @ param key * 密钥 * @ return 签名后的字符 * @ throws Exception * 签名失败 */ public static String hmacSHA1 ( String data , String key ) { } }
StringBuilder sb = new StringBuilder ( ) ; try { byte [ ] keyBytes = key . getBytes ( ) ; SecretKeySpec signingKey = new SecretKeySpec ( keyBytes , "HmacSHA1" ) ; Mac mac = Mac . getInstance ( "HmacSHA1" ) ; mac . init ( signingKey ) ; byte [ ] rawHmac = mac . doFinal ( data . getBytes ( ) ) ; for ( byte b : rawHmac ) { sb . append ( byteToHexString ( b ) ) ; } } catch ( Exception e ) { throw new RuntimeException ( "获取HmacSHA1签名失败[" + e . getMessage ( ) + "]!" , e ) ; } return sb . toString ( ) ;
public class EventFilterParser { /** * EventFilter . g : 155:1 : regex _ predicate : path _ function MATCHES STRING - > ^ ( MATCHES path _ function STRING ) ; */ public final EventFilterParser . regex_predicate_return regex_predicate ( ) throws RecognitionException { } }
EventFilterParser . regex_predicate_return retval = new EventFilterParser . regex_predicate_return ( ) ; retval . start = input . LT ( 1 ) ; CommonTree root_0 = null ; Token MATCHES75 = null ; Token STRING76 = null ; EventFilterParser . path_function_return path_function74 = null ; CommonTree MATCHES75_tree = null ; CommonTree STRING76_tree = null ; RewriteRuleTokenStream stream_MATCHES = new RewriteRuleTokenStream ( adaptor , "token MATCHES" ) ; RewriteRuleTokenStream stream_STRING = new RewriteRuleTokenStream ( adaptor , "token STRING" ) ; RewriteRuleSubtreeStream stream_path_function = new RewriteRuleSubtreeStream ( adaptor , "rule path_function" ) ; try { // EventFilter . g : 156:2 : ( path _ function MATCHES STRING - > ^ ( MATCHES path _ function STRING ) ) // EventFilter . g : 156:6 : path _ function MATCHES STRING { pushFollow ( FOLLOW_path_function_in_regex_predicate909 ) ; path_function74 = path_function ( ) ; state . _fsp -- ; stream_path_function . add ( path_function74 . getTree ( ) ) ; MATCHES75 = ( Token ) match ( input , MATCHES , FOLLOW_MATCHES_in_regex_predicate911 ) ; stream_MATCHES . add ( MATCHES75 ) ; STRING76 = ( Token ) match ( input , STRING , FOLLOW_STRING_in_regex_predicate913 ) ; stream_STRING . add ( STRING76 ) ; // AST REWRITE // elements : MATCHES , path _ function , STRING // token labels : // rule labels : retval // token list labels : // rule list labels : // wildcard labels : retval . tree = root_0 ; RewriteRuleSubtreeStream stream_retval = new RewriteRuleSubtreeStream ( adaptor , "rule retval" , retval != null ? retval . tree : null ) ; root_0 = ( CommonTree ) adaptor . nil ( ) ; // 156:35 : - > ^ ( MATCHES path _ function STRING ) { // EventFilter . g : 156:38 : ^ ( MATCHES path _ function STRING ) { CommonTree root_1 = ( CommonTree ) adaptor . nil ( ) ; root_1 = ( CommonTree ) adaptor . becomeRoot ( new MatchesTreeNode ( stream_MATCHES . nextToken ( ) ) , root_1 ) ; adaptor . addChild ( root_1 , stream_path_function . 
nextTree ( ) ) ; adaptor . addChild ( root_1 , new StringTreeNode ( stream_STRING . nextToken ( ) ) ) ; adaptor . addChild ( root_0 , root_1 ) ; } } retval . tree = root_0 ; } retval . stop = input . LT ( - 1 ) ; retval . tree = ( CommonTree ) adaptor . rulePostProcessing ( root_0 ) ; adaptor . setTokenBoundaries ( retval . tree , retval . start , retval . stop ) ; } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; retval . tree = ( CommonTree ) adaptor . errorNode ( input , retval . start , input . LT ( - 1 ) , re ) ; } finally { // do for sure before leaving } return retval ;
public class ListDocumentsRequest { /** * One or more filters . Use a filter to return a more specific list of results . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setDocumentFilterList ( java . util . Collection ) } or { @ link # withDocumentFilterList ( java . util . Collection ) } if * you want to override the existing values . * @ param documentFilterList * One or more filters . Use a filter to return a more specific list of results . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListDocumentsRequest withDocumentFilterList ( DocumentFilter ... documentFilterList ) { } }
if ( this . documentFilterList == null ) { setDocumentFilterList ( new com . amazonaws . internal . SdkInternalList < DocumentFilter > ( documentFilterList . length ) ) ; } for ( DocumentFilter ele : documentFilterList ) { this . documentFilterList . add ( ele ) ; } return this ;
public class OrmLiteDefaultContentProvider { /** * This method is called after the onInsert processing has been handled . If you ' re a need , * you can override this method . * @ param result * This is the return value of onInsert method . * @ param uri * This is the Uri of target . * @ param target * This is identical to the argument of onInsert method . * It is MatcherPattern objects that match to evaluate Uri by UriMatcher . You can * access information in the tables and columns , ContentUri , MimeType etc . * @ param parameter * This is identical to the argument of onInsert method . * Arguments passed to the insert ( ) method . * @ since 1.0.4 */ protected void onInsertCompleted ( Uri result , Uri uri , MatcherPattern target , InsertParameters parameter ) { } }
this . getContext ( ) . getContentResolver ( ) . notifyChange ( result , null ) ;
public class NamespaceService { /** * Updates a the members of a namespace . * @ param id The ID of the namespace to update . * @ param users The updated members of the namespace . * @ return The updated namespace . * @ throws IOException If the server cannot be reached . * @ throws TokenExpiredException If the token sent along with the request has expired */ public Namespace updateNamespaceMembers ( BigInteger id , Set < String > users ) throws IOException , TokenExpiredException { } }
String requestUrl = RESOURCE + "/" + id . toString ( ) + "/users" ; ArgusResponse response = getClient ( ) . executeHttpRequest ( ArgusHttpClient . RequestType . PUT , requestUrl , users ) ; assertValidResponse ( response , requestUrl ) ; return fromJson ( response . getResult ( ) , Namespace . class ) ;
public class CompletenessCalculator { /** * private boolean isCollectionSkippable ( * List < String > skippableIds , * JsonBranch collection , * int i , * JsonPathCache cache , * Object jsonFragment ) * boolean skippable = false ; * JsonBranch identifierPath = collection . getIdentifier ( ) ; * if ( ! skippableIds . isEmpty ( ) & & identifierPath ! = null ) { * String address = String . format ( " % s / % d / % s " , * collection . getJsonPath ( ) , i , identifierPath . getJsonPath ( ) ) ; * List < T > values = cache . get ( address , identifierPath . getJsonPath ( ) , jsonFragment ) ; * String id = ( skippedEntryChecker ! = null ) * ? skippedEntryChecker . extractId ( values . get ( 0 ) ) * : values . get ( 0 ) . getValue ( ) ; * skippable = skippableIds . contains ( id ) ; * return skippable ; */ public void evaluateJsonBranch ( JsonBranch jsonBranch , JsonPathCache cache , CompletenessCounter completenessCounter , String address , Object jsonFragment ) { } }
List < T > values = cache . get ( address , jsonBranch . getJsonPath ( ) , jsonFragment ) ; handleValues ( completenessCounter , jsonBranch , values ) ;
public class ObjectManager { /** * @ param long the new Log filesize in bytes . */ public void setLogFileSize ( long newSize ) throws ObjectManagerException { } }
if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , "seLogFileSize" ) ; objectManagerState . logOutput . setLogFileSize ( newSize ) ; if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , "setLogFileSize" ) ;
public class BamUtils { /** * Write in wig file format the coverage for the region given . It uses fixedStep with step equals to 1. * @ param regionCoverage Region containing the coverage values * @ param span Span ( to group coverage contiguous values in a mean coverage ) * @ param header Flag , to write a header line ( assuming fixedStep , and start = 1 and step = 1) * @ param writer File writer */ public static void printWigFormatCoverage ( RegionCoverage regionCoverage , int span , boolean header , PrintWriter writer ) { } }
// sanity check if ( span < 1 ) { span = 1 ; } if ( header ) { writer . println ( "fixedStep chrom=" + regionCoverage . getChromosome ( ) + " start=1 step=1 span=" + span ) ; } float [ ] values = regionCoverage . getValues ( ) ; if ( span == 1 ) { for ( int i = 0 ; i < values . length ; i ++ ) { writer . println ( values [ i ] ) ; } } else { int counter = 0 ; int sum = 0 ; for ( int i = 0 ; i < values . length ; i ++ ) { counter ++ ; sum += values [ i ] ; if ( counter == span ) { writer . println ( sum / counter ) ; counter = 0 ; sum = 0 ; } } if ( counter > 0 ) { writer . println ( sum / counter ) ; } }
public class MediaOverlay { /** * Called by the { @ link FrameManager } to propagate our dirty regions to the active repaint * manager so that it can repaint the underlying components just prior to our painting our * media . This will be followed by a call to { @ link # paint } after the components have been * repainted . */ public void propagateDirtyRegions ( ActiveRepaintManager repmgr , JRootPane root ) { } }
if ( _metamgr . needsPaint ( ) ) { // tell the repaint manager about our raw ( unmerged ) dirty regions so that it can dirty // only components that are actually impacted List < Rectangle > dlist = _metamgr . getRegionManager ( ) . peekDirtyRegions ( ) ; for ( int ii = 0 , ll = dlist . size ( ) ; ii < ll ; ii ++ ) { Rectangle dirty = dlist . get ( ii ) ; repmgr . addDirtyRegion ( root , dirty . x - root . getX ( ) , dirty . y - root . getY ( ) , dirty . width , dirty . height ) ; } }
public class MapClearExpiredRecordsTask { /** * Here we check if that partition has any expirable record or not , * if no expirable record exists in that partition no need to fire * an expiration operation . * @ param partitionContainer corresponding partition container . * @ return < code > true < / code > if no expirable record in that * partition < code > false < / code > otherwise . */ @ Override protected boolean notHaveAnyExpirableRecord ( PartitionContainer partitionContainer ) { } }
boolean notExist = true ; final ConcurrentMap < String , RecordStore > maps = partitionContainer . getMaps ( ) ; for ( RecordStore store : maps . values ( ) ) { if ( store . isExpirable ( ) ) { notExist = false ; break ; } } return notExist ;
public class QueueListenerFactory { /** * Create a new { @ link QueueListenerFactory } . The provided { @ code scheduler } is used for listener notification and I / O * operations . * @ param scheduler a scheduler from rxjava for I / O operations * @ param disqueURI the DisqueURI * @ param codec use this codec to encode / decode keys and values , must note be { @ literal null } * @ param queues the queue names * @ param < K > Key type * @ param < V > Value type * @ return a new instance of { @ link QueueListenerFactory } */ public static < K , V > QueueListenerFactory < K , V > create ( Scheduler scheduler , DisqueURI disqueURI , RedisCodec < K , V > codec , K ... queues ) { } }
return new QueueListenerFactory < K , V > ( scheduler , disqueURI , codec , queues ) ;
public class MessageDetailScreen { /** * Sync the contact type record to the main value . */ public void syncContactTypeToMain ( ) { } }
ReferenceField fldContactType = ( ReferenceField ) this . getMainRecord ( ) . getField ( MessageDetail . CONTACT_TYPE_ID ) ; String strContactTypeParam = fldContactType . getFieldName ( ) ; this . syncRecordToMainField ( fldContactType , null , strContactTypeParam ) ;
public class autoscaleprofile { /** * Use this API to fetch filtered set of autoscaleprofile resources . * filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */ public static autoscaleprofile [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
autoscaleprofile obj = new autoscaleprofile ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; autoscaleprofile [ ] response = ( autoscaleprofile [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class Checker { /** * < editor - fold defaultstate = " collapsed " desc = " HTML attributes " > */ @ Override @ SuppressWarnings ( "fallthrough" ) public Void visitAttribute ( AttributeTree tree , Void ignore ) { } }
HtmlTag currTag = tagStack . peek ( ) . tag ; if ( currTag != null ) { Name name = tree . getName ( ) ; HtmlTag . Attr attr = currTag . getAttr ( name ) ; if ( attr != null ) { boolean first = tagStack . peek ( ) . attrs . add ( attr ) ; if ( ! first ) env . messages . error ( HTML , tree , "dc.attr.repeated" , name ) ; } // for now , doclint allows all attribute names beginning with " on " as event handler names , // without checking the validity or applicability of the name if ( ! name . toString ( ) . startsWith ( "on" ) ) { AttrKind k = currTag . getAttrKind ( name ) ; switch ( k ) { case OK : break ; case INVALID : env . messages . error ( HTML , tree , "dc.attr.unknown" , name ) ; break ; case OBSOLETE : env . messages . warning ( ACCESSIBILITY , tree , "dc.attr.obsolete" , name ) ; break ; case USE_CSS : env . messages . warning ( ACCESSIBILITY , tree , "dc.attr.obsolete.use.css" , name ) ; break ; } } if ( attr != null ) { switch ( attr ) { case NAME : if ( currTag != HtmlTag . A ) { break ; } // fallthrough case ID : String value = getAttrValue ( tree ) ; if ( value == null ) { env . messages . error ( HTML , tree , "dc.anchor.value.missing" ) ; } else { if ( ! validName . matcher ( value ) . matches ( ) ) { env . messages . error ( HTML , tree , "dc.invalid.anchor" , value ) ; } if ( ! checkAnchor ( value ) ) { env . messages . error ( HTML , tree , "dc.anchor.already.defined" , value ) ; } } break ; case HREF : if ( currTag == HtmlTag . A ) { String v = getAttrValue ( tree ) ; if ( v == null || v . isEmpty ( ) ) { env . messages . error ( HTML , tree , "dc.attr.lacks.value" ) ; } else { Matcher m = docRoot . matcher ( v ) ; if ( m . matches ( ) ) { String rest = m . group ( 2 ) ; if ( ! rest . isEmpty ( ) ) checkURI ( tree , rest ) ; } else { checkURI ( tree , v ) ; } } } break ; case VALUE : if ( currTag == HtmlTag . LI ) { String v = getAttrValue ( tree ) ; if ( v == null || v . isEmpty ( ) ) { env . messages . 
error ( HTML , tree , "dc.attr.lacks.value" ) ; } else if ( ! validNumber . matcher ( v ) . matches ( ) ) { env . messages . error ( HTML , tree , "dc.attr.not.number" ) ; } } break ; } } } // TODO : basic check on value return super . visitAttribute ( tree , ignore ) ;
public class DevicesApi { /** * Get Device Token * Retrieves a device & # 39 ; s token * @ param deviceId deviceId ( required ) * @ return DeviceTokenEnvelope * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public DeviceTokenEnvelope getDeviceToken ( String deviceId ) throws ApiException { } }
ApiResponse < DeviceTokenEnvelope > resp = getDeviceTokenWithHttpInfo ( deviceId ) ; return resp . getData ( ) ;
public class ApiOvhOrder { /** * Get prices and contracts information * REST : GET / order / cloud / project / { serviceName } / credit * @ param amount [ required ] Amount to add in your cloud credit * @ param serviceName [ required ] The project id */ public OvhOrder cloud_project_serviceName_credit_GET ( String serviceName , Long amount ) throws IOException { } }
String qPath = "/order/cloud/project/{serviceName}/credit" ; StringBuilder sb = path ( qPath , serviceName ) ; query ( sb , "amount" , amount ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhOrder . class ) ;
public class AttrResultInfo { /** * Get the text representation * @ return The string */ public String getAsText ( ) { } }
if ( throwable != null ) return throwable . toString ( ) ; if ( result != null ) { try { if ( editor != null ) { editor . setValue ( result ) ; return editor . getAsText ( ) ; } else { return result . toString ( ) ; } } catch ( Exception e ) { return "String representation of " + name + "unavailable" ; } } return null ;
public class LocationUtils { /** * Add a { @ link LocationFinder } to the list of finders that will be queried * for an object ' s location by { @ link # getLocation ( Object , String ) } . * < b > Important : < / b > LocationUtils internally stores a weak reference to the * finder . This avoids creating strong links between the classloader holding * this class and the finder ' s classloader , which can cause some weird * memory leaks if the finder ' s classloader is to be reloaded . Therefore , * you < em > have < / em > to keep a strong reference to the finder in the calling * code , e . g . : * < pre > * private static LocationUtils . LocationFinder myFinder = * new LocationUtils . LocationFinder ( ) { * public Location getLocation ( Object obj , String desc ) { * static { * LocationUtils . addFinder ( myFinder ) ; * < / pre > * @ param finder * the location finder to add */ public static void addFinder ( LocationFinder finder ) { } }
if ( finder == null ) { return ; } synchronized ( LocationFinder . class ) { List < WeakReference < LocationFinder > > newFinders = new ArrayList < WeakReference < LocationFinder > > ( finders ) ; newFinders . add ( new WeakReference < LocationFinder > ( finder ) ) ; finders = newFinders ; }
public class IfcPhysicalQuantityImpl {
    /**
     * <!-- begin-user-doc -->
     * Returns the complex quantities this quantity is a part of, via the
     * reflective EMF accessor. The boolean argument presumably requests proxy
     * resolution (standard EMF eGet contract) — confirm against the base class.
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public EList<IfcPhysicalComplexQuantity> getPartOfComplex() {
        // Cast is safe: the feature literal is typed IfcPhysicalComplexQuantity in Ifc4Package.
        return (EList<IfcPhysicalComplexQuantity>) eGet(Ifc4Package.Literals.IFC_PHYSICAL_QUANTITY__PART_OF_COMPLEX, true);
    }
}
public class DateTimeParseContext { /** * Gets the effective chronology during parsing . * @ return the effective parsing chronology , not null */ Chronology getEffectiveChronology ( ) { } }
Chronology chrono = currentParsed ( ) . chrono ; if ( chrono == null ) { chrono = overrideChronology ; if ( chrono == null ) { chrono = IsoChronology . INSTANCE ; } } return chrono ;
public class ConfigLoader { /** * Builds a configuration class based on XML file * @ param file * @ return */ private static JCorsConfig loadFromFileStream ( InputStream fileStream ) { } }
try { JAXBContext context = JAXBContext . newInstance ( ConfigBuilder . class ) ; Unmarshaller unmarshaller = context . createUnmarshaller ( ) ; unmarshaller . setSchema ( loadXmlSchema ( ) ) ; ConfigBuilder configBuilder = ( ConfigBuilder ) unmarshaller . unmarshal ( fileStream ) ; Constraint . ensureNotNull ( configBuilder , "It was not possible to get a valid configuration builder instance" ) ; return configBuilder . buildConfig ( ) ; } catch ( Exception e ) { log . error ( "Failed loading configuration file" , e ) ; return null ; }
public class AbstractCompiler { /** * httl . properties : compile . directory = classes */ public void setCompileDirectory ( String directory ) { } }
if ( directory != null && directory . trim ( ) . length ( ) > 0 ) { File file = new File ( directory ) ; if ( file . exists ( ) || file . mkdirs ( ) ) { this . compileDirectory = file ; } }
public class ProcessContextProperties { /** * Data Usage */ public void setDataUsage ( String activity , Map < String , Set < DataUsage > > dataUsage ) throws PropertyException { } }
Validate . notNull ( activity ) ; Validate . notEmpty ( activity ) ; Validate . notNull ( dataUsage ) ; Validate . noNullElements ( dataUsage . keySet ( ) ) ; Validate . noNullElements ( dataUsage . values ( ) ) ; // 1 . Add data usages // This also adds the data usages to the list of data usages List < String > propertyNamesForDataUsages = new ArrayList < > ( ) ; for ( String attribute : dataUsage . keySet ( ) ) { propertyNamesForDataUsages . add ( addDataUsage ( attribute , dataUsage . get ( attribute ) ) ) ; } // 2 . Add data usage names to the list of data usages for this activity addActivityWithDataUsage ( activity ) ; props . setProperty ( String . format ( ACTIVITY_DATA_USAGES_FORMAT , activity ) , ArrayUtils . toString ( propertyNamesForDataUsages . toArray ( ) ) ) ;
public class BRSImpl {
    /**
     * <!-- begin-user-doc -->
     * Reports whether the given feature currently differs from its default value.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case AfplibPackage.BRS__RS_NAME:
                // Set when rsName differs from its (possibly null) default.
                return RS_NAME_EDEFAULT == null ? rsName != null : !RS_NAME_EDEFAULT.equals(rsName);
            case AfplibPackage.BRS__TRIPLETS:
                // A triplets list only counts as set when non-null and non-empty.
                return triplets != null && !triplets.isEmpty();
        }
        // Unknown features are delegated to the superclass.
        return super.eIsSet(featureID);
    }
}
public class MineBugHistory { /** * equivalent to out . print ( obj ) except it may be padded on the left or right * @ param width * padding will occur if the stringified oxj is shorter than this * @ param alignRight * true to pad on the left , false to pad on the right * @ param out * the PrintStream printed to * @ param obj * the value to print ( may be an auto - boxed primitive ) */ private static void print ( int width , boolean alignRight , PrintStream out , Object obj ) { } }
String s = String . valueOf ( obj ) ; int padLen = width - s . length ( ) ; if ( alignRight ) { pad ( padLen , out ) ; } out . print ( s ) ; // doesn ' t truncate if ( s . length ( ) > width ) if ( ! alignRight ) { pad ( padLen , out ) ; }
public class RobotApplicationConfigMarshaller { /** * Marshall the given parameter object . */ public void marshall ( RobotApplicationConfig robotApplicationConfig , ProtocolMarshaller protocolMarshaller ) { } }
if ( robotApplicationConfig == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( robotApplicationConfig . getApplication ( ) , APPLICATION_BINDING ) ; protocolMarshaller . marshall ( robotApplicationConfig . getApplicationVersion ( ) , APPLICATIONVERSION_BINDING ) ; protocolMarshaller . marshall ( robotApplicationConfig . getLaunchConfig ( ) , LAUNCHCONFIG_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class DTDIdRefsAttr {
    /**
     * Public API, validation.
     *
     * Validates a whitespace-separated IDREFS attribute value in cbuf[start..end).
     * Each token must be a valid XML name; every token is registered as a
     * referenced id in the validator's id map. When normalize is true, returns
     * the ids re-joined with single spaces; otherwise returns null.
     */
    @SuppressWarnings("cast")
    @Override
    public String validate(DTDValidatorBase v, char[] cbuf, int start, int end, boolean normalize) throws XMLStreamException {
        /* Let's skip leading/trailing white space, even if we are not
         * to normalize visible attribute value. This allows for better
         * round-trip handling (no changes for physical value caller
         * gets), but still allows successful validation.
         */
        while (start < end && WstxInputData.isSpaceChar(cbuf[start])) {
            ++start;
        }
        // No id at all?
        if (start >= end) {
            return reportValidationProblem(v, "Empty IDREFS value");
        }
        --end; // so that it now points to the last char
        // We know the first char is not a space by now...
        while (end > start && WstxInputData.isSpaceChar(cbuf[end])) {
            --end;
        }
        // Ok; now start points to first, end to last char (both inclusive)
        ElementIdMap m = v.getIdMap();
        Location loc = v.getLocation();
        String idStr = null;
        StringBuilder sb = null;
        while (start <= end) {
            // Check char validity of the token, computing its hash as we go:
            char c = cbuf[start];
            if (!WstxInputData.isNameStartChar(c, mCfgNsAware, mCfgXml11)) {
                return reportInvalidChar(v, c, "not valid as the first IDREFS character");
            }
            int hash = (int) c;
            int i = start + 1;
            for (; i <= end; ++i) {
                c = cbuf[i];
                if (WstxInputData.isSpaceChar(c)) {
                    break; // end of this token
                }
                if (!WstxInputData.isNameChar(c, mCfgNsAware, mCfgXml11)) {
                    return reportInvalidChar(v, c, "not valid as an IDREFS character");
                }
                hash = (hash * 31) + (int) c;
            }
            // Got the next id ref; register it as referenced for later resolution.
            ElementId id = m.addReferenced(cbuf, start, i - start, hash, loc, v.getElemName(), mName);
            // Can skip the trailing space char (if there was one)
            start = i + 1;
            /* When normalizing, we can possibly share the single id String, or
             * alternatively, compose a normalized String from multiple ids.
             */
            if (normalize) {
                if (idStr == null) { // first idref
                    idStr = id.getId();
                } else {
                    if (sb == null) {
                        sb = new StringBuilder(idStr);
                    }
                    idStr = id.getId();
                    sb.append(' ');
                    sb.append(idStr);
                }
            }
            // Any further white space to skip before the next token?
            while (start <= end && WstxInputData.isSpaceChar(cbuf[start])) {
                ++start;
            }
        }
        if (normalize) {
            // Multiple ids were joined in sb; a single id stays in idStr as-is.
            if (sb != null) {
                idStr = sb.toString();
            }
            return idStr;
        }
        return null;
    }
}
public class BigtableTableAdminGCJClient { /** * { @ inheritDoc } */ @ Override public ApiFuture < ListSnapshotsResponse > listSnapshotsAsync ( ListSnapshotsRequest request ) { } }
return baseAdminClient . listSnapshotsCallable ( ) . futureCall ( request ) ;
public class GoogleMapShapeConverter { /** * Convert a list of { @ link Polyline } to a { @ link MultiLineString } * @ param polylineList polyline list * @ param hasZ has z flag * @ param hasM has m flag * @ return multi line string */ public MultiLineString toMultiLineString ( List < Polyline > polylineList , boolean hasZ , boolean hasM ) { } }
MultiLineString multiLineString = new MultiLineString ( hasZ , hasM ) ; for ( Polyline polyline : polylineList ) { LineString lineString = toLineString ( polyline ) ; multiLineString . addLineString ( lineString ) ; } return multiLineString ;
public class StreamGraphGenerator { /** * Generates a { @ code StreamGraph } by traversing the graph of { @ code StreamTransformations } * starting from the given transformations . * @ param env The { @ code StreamExecutionEnvironment } that is used to set some parameters of the * job * @ param transformations The transformations starting from which to transform the graph * @ return The generated { @ code StreamGraph } */ public static StreamGraph generate ( StreamExecutionEnvironment env , List < StreamTransformation < ? > > transformations ) { } }
return new StreamGraphGenerator ( env ) . generateInternal ( transformations ) ;
public class SplittableElementSet { /** * Get a subset for the given partition . * @ param k the partition key * @ return the resulting subset . Empty if no elements belong to the given partition . */ public Set < E > getSubSet ( int k ) { } }
int from = - 1 ; // TODO : very bad . Bounds should be memorized for ( int x = 0 ; x < values . size ( ) ; x ++ ) { int cIdx = index . get ( values . get ( x ) . id ( ) ) ; if ( cIdx == k && from == - 1 ) { from = x ; } if ( from >= 0 && cIdx > k ) { return new ElementSubSet < > ( this , k , from , x ) ; } } if ( from >= 0 ) { return new ElementSubSet < > ( this , k , from , values . size ( ) ) ; } return Collections . emptySet ( ) ;
public class VF2State {
    /**
     * {@inheritDoc}
     *
     * Undoes the most recent pair addition: clears the in/out "frontier" depth
     * marks stamped at the current coreLen for the added node (and its
     * neighbors) in both graphs, removes the pair from the core mappings, and
     * restores coreLen. Only marks equal to coreLen are reset — older marks
     * belong to earlier levels and must survive.
     */
    public void backTrack() {
        // At most one pair can have been added since the saved state.
        assert coreLen - origCoreLen <= 1;
        assert addedNode1 != NULL_NODE;
        if (origCoreLen < coreLen) {
            int i, node2;
            // Reset frontier marks stamped at this depth around addedNode1 in g1.
            if (in1[addedNode1] == coreLen)
                in1[addedNode1] = 0;
            for (int other : getPredecessors(g1, addedNode1)) {
                if (in1[other] == coreLen)
                    in1[other] = 0;
            }
            if (out1[addedNode1] == coreLen)
                out1[addedNode1] = 0;
            for (int other : getSuccessors(g1, addedNode1)) {
                if (out1[other] == coreLen)
                    out1[other] = 0;
            }
            // The g2 node that was matched to addedNode1.
            node2 = core1[addedNode1];
            if (in2[node2] == coreLen)
                in2[node2] = 0;
            for (int other : getPredecessors(g2, node2)) {
                if (in2[other] == coreLen)
                    in2[other] = 0;
            }
            if (out2[node2] == coreLen)
                out2[node2] = 0;
            for (int other : getSuccessors(g2, node2)) {
                if (out2[other] == coreLen)
                    out2[other] = 0;
            }
            // Remove the pair from both core mappings and restore the depth.
            core1[addedNode1] = NULL_NODE;
            core2[node2] = NULL_NODE;
            coreLen = origCoreLen;
            addedNode1 = NULL_NODE;
        }
    }
}
public class TDHttpClient { /** * Submit an API request and get the result as String value ( e . g . json ) * @ param apiRequest * @ param apiKeyCache * @ return */ public String call ( TDApiRequest apiRequest , Optional < String > apiKeyCache ) { } }
String content = submitRequest ( apiRequest , apiKeyCache , stringContentHandler ) ; if ( logger . isTraceEnabled ( ) ) { logger . trace ( "response:\n{}" , content ) ; } return content ;
public class NumberInRange { /** * Test if a number is in an arbitrary range . * @ param number * a number * @ param min * lower boundary of the range * @ param max * upper boundary of the range * @ return true if the given number is within the range */ @ ArgumentsChecked @ Throws ( IllegalNullArgumentException . class ) public static boolean isInRange ( @ Nonnull final Number number , @ Nonnull final BigInteger min , @ Nonnull final BigInteger max ) { } }
Check . notNull ( number , "number" ) ; Check . notNull ( min , "min" ) ; Check . notNull ( max , "max" ) ; BigInteger bigInteger = null ; if ( number instanceof Byte || number instanceof Short || number instanceof Integer || number instanceof Long ) { bigInteger = BigInteger . valueOf ( number . longValue ( ) ) ; } else if ( number instanceof Float || number instanceof Double ) { bigInteger = new BigDecimal ( number . doubleValue ( ) ) . toBigInteger ( ) ; } else if ( number instanceof BigInteger ) { bigInteger = ( BigInteger ) number ; } else if ( number instanceof BigDecimal ) { bigInteger = ( ( BigDecimal ) number ) . toBigInteger ( ) ; } else { throw new IllegalNumberArgumentException ( "Return value is no known subclass of 'java.lang.Number': " + number . getClass ( ) . getName ( ) ) ; } return max . compareTo ( bigInteger ) >= 0 && min . compareTo ( bigInteger ) <= 0 ;
public class NanoHTTPD { /** * URL - encodes everything between " / " - characters . * Encodes spaces as ' % 20 ' instead of ' + ' . * @ throws UnsupportedEncodingException */ private String encodeUri ( String uri ) { } }
String newUri = "" ; StringTokenizer st = new StringTokenizer ( uri , "/ " , true ) ; while ( st . hasMoreTokens ( ) ) { String tok = st . nextToken ( ) ; if ( tok . equals ( "/" ) ) newUri += "/" ; else if ( tok . equals ( " " ) ) newUri += "%20" ; else { try { newUri += URLEncoder . encode ( tok , "UTF-8" ) ; } catch ( UnsupportedEncodingException e ) { throw Log . errRTExcept ( e ) ; } } } return newUri ;
public class PreFillEntryLong { /** * Adds data to this Entry . * @ param pos * @ param val * @ param scn */ public void add ( int pos , long val , long scn ) { } }
if ( _index < _entryCapacity ) { _valArray . get ( _index ++ ) . reinit ( pos , val , scn ) ; maintainScn ( scn ) ; } else { throw new EntryOverflowException ( ) ; }
public class CLDRBase { /** * Resolve a , possibly incomplete , language tag into an expanded * CLDR locale . This performs the " add likely subtags " operation . * http : / / www . unicode . org / reports / tr35 / tr35 . html # Likely _ Subtags */ public CLDR . Locale resolve ( String languageTag ) { } }
MetaLocale meta = MetaLocale . fromLanguageTag ( languageTag ) ; return resolve ( meta ) ;
public class DatabaseAccountsInner { /** * Creates or updates an Azure Cosmos DB database account . * @ param resourceGroupName Name of an Azure resource group . * @ param accountName Cosmos DB database account name . * @ param createUpdateParameters The parameters to provide for the current database account . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < DatabaseAccountInner > createOrUpdateAsync ( String resourceGroupName , String accountName , DatabaseAccountCreateUpdateParameters createUpdateParameters , final ServiceCallback < DatabaseAccountInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( createOrUpdateWithServiceResponseAsync ( resourceGroupName , accountName , createUpdateParameters ) , serviceCallback ) ;
public class JedisUtils { /** * Create a new { @ link ShardedJedisPool } with specified pool configs . * @ param poolConfig * @ param hostsAndPorts * format { @ code host1 : port1 , host2 : port2 , . . . } , default Redis port is used if not * specified * @ param timeoutMs * @ return */ public static ShardedJedisPool newShardedJedisPool ( JedisPoolConfig poolConfig , String hostsAndPorts , int timeoutMs ) { } }
return newShardedJedisPool ( poolConfig , hostsAndPorts , null , timeoutMs ) ;
public class NewChunk {
    /**
     * Return the data so compressed.
     *
     * Compresses this NewChunk into a plain Chunk, sanity-checks the result
     * against the owning Vec's type and length, then releases the staging
     * arrays so the (possibly long-lived) NewChunk does not pin memory.
     */
    public Chunk compress() {
        Chunk res = compress2();
        byte type = type();
        assert _vec == null || // Various testing scenarios do not set a Vec
            type == _vec._type || // Equal types
            // Allow all-bad Chunks in any type of Vec
            type == Vec.T_BAD ||
            // Specifically allow the NewChunk to be a numeric type (better be all
            // ints) and the selected Vec type an categorical - whose String mapping
            // may not be set yet.
            (type == Vec.T_NUM && _vec._type == Vec.T_CAT) ||
            // Another one: numeric Chunk and Time Vec (which will turn into all longs/zeros/nans Chunks)
            (type == Vec.T_NUM && _vec._type == Vec.T_TIME && !res.hasFloat())
            : "NewChunk has type " + Vec.TYPE_STR[type] + ", but the Vec is of type " + _vec.get_type_str();
        assert _len == res._len : "NewChunk has length " + _len + ", compressed Chunk has " + res._len;
        // Force everything to null after compress to free up the memory. Seems
        // like a non-issue in the land of GC, but the NewChunk *should* be dead
        // after this, but might drag on. The arrays are large, and during a big
        // Parse there's lots and lots of them... so free early just in case a GC
        // happens before the drag-time on the NewChunk finishes.
        _id = null;
        _xs = null;
        _ds = null;
        _ms = null;
        _is = null;
        _ss = null;
        return res;
    }
}
public class Parser {
    /**
     * Test if the <tt>-</tt> character at <tt>offset</tt> starts a <tt>--</tt> style line comment,
     * and return the position of the first <tt>\r</tt> or <tt>\n</tt> character
     * (or the last position in the query if no line break follows).
     *
     * @param query  query
     * @param offset start offset
     * @return position of the first <tt>\r</tt> or <tt>\n</tt> character, or
     *         the unchanged offset when no comment starts here
     */
    public static int parseLineComment(final char[] query, int offset) {
        final int last = query.length - 1;
        if (offset < last && query[offset + 1] == '-') {
            // Scan forward until a line break or the end of the buffer.
            int pos = offset;
            while (pos < last) {
                pos++;
                final char c = query[pos];
                if (c == '\r' || c == '\n') {
                    break;
                }
            }
            return pos;
        }
        return offset;
    }
}
public class Database {
    /**
     * Create aggregate function.
     *
     * @param name  the name of the new function
     * @param nargs number of arguments to function
     * @param f     interface of function
     */
    public void create_aggregate(String name, int nargs, Function f) {
        // Serialize registration against other operations on this Database handle.
        synchronized (this) {
            _create_aggregate(name, nargs, f);
        }
    }
}
public class DMatrixUtils { /** * Returns the closest rounded value of the given value for the given steps . * @ param value The original value to be rounded . * @ param steps The steps . * @ return The closest rounded value of the given value for the given steps . */ public static BigDecimal roundToClosest ( double value , double steps ) { } }
final BigDecimal down = DMatrixUtils . roundDownTo ( value , steps ) ; final BigDecimal up = DMatrixUtils . roundUpTo ( value , steps ) ; final BigDecimal orig = new BigDecimal ( String . valueOf ( value ) ) ; if ( orig . subtract ( down ) . abs ( ) . compareTo ( orig . subtract ( up ) . abs ( ) ) < 0 ) { return down ; } return up ;
public class Script {
    /**
     * Returns the serialized program as a newly created byte array.
     *
     * The serialization is computed lazily from the chunks on first call and
     * cached; subsequent calls return a defensive copy of the cache.
     */
    public byte[] getProgram() {
        try {
            // Don't round-trip as Bitcoin Core doesn't and it would introduce a mismatch.
            if (program != null)
                // Defensive copy so callers cannot mutate the cached bytes.
                return Arrays.copyOf(program, program.length);
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            for (ScriptChunk chunk : chunks) {
                chunk.write(bos);
            }
            // NOTE(review): the first call returns the cached array itself (no copy),
            // unlike later calls — confirm callers do not mutate it.
            program = bos.toByteArray();
            return program;
        } catch (IOException e) {
            throw new RuntimeException(e); // Cannot happen: writing to a ByteArrayOutputStream.
        }
    }
}
public class DeclineHandshakeRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeclineHandshakeRequest declineHandshakeRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( declineHandshakeRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( declineHandshakeRequest . getHandshakeId ( ) , HANDSHAKEID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class MigrationThread {
    /**
     * Interrupts the migration thread and joins on it.
     * <strong>Must not be called on the migration thread itself</strong>
     * because it will result in infinite blocking.
     */
    void stopNow() {
        assert currentThread() != this : "stopNow must not be called on the migration thread";
        // Signal shutdown and drop any queued work before interrupting.
        running = false;
        queue.clear();
        interrupt();
        // Join until the thread really dies; remember (but defer) any interrupt
        // delivered to the *calling* thread while we wait.
        boolean currentThreadInterrupted = false;
        while (true) {
            try {
                join();
            } catch (InterruptedException e) {
                currentThreadInterrupted = true;
                continue;
            }
            break;
        }
        // Restore the caller's interrupt status that we swallowed above.
        if (currentThreadInterrupted) {
            currentThread().interrupt();
        }
    }
}
public class PublisherServiceUsingActiveMq {
    /**
     * region > init, shutdown
     *
     * Resolves the broker/queue configuration, creates the JMS connection and
     * starts it. A failure to create the connection is fatal (rethrown); a
     * failure to start is cleaned up and swallowed.
     */
    @PostConstruct
    public void init(Map<String, String> properties) {
        // Resolve configuration, falling back to the compile-time defaults.
        vmTransportUrl = properties.getOrDefault(KEY_VM_TRANSPORT_URL, KEY_VM_TRANSPORT_URL_DEFAULT);
        memberInteractionsQueueName = properties.getOrDefault(KEY_MEMBER_INTERACTIONS_QUEUE, KEY_MEMBER_INTERACTIONS_QUEUE_DEFAULT);
        jmsConnectionFactory = new ActiveMQConnectionFactory(vmTransportUrl);
        try {
            jmsConnection = jmsConnectionFactory.createConnection();
        } catch (JMSException e) {
            LOG.error("Unable to create connection", e);
            throw new RuntimeException(e);
        }
        try {
            jmsConnection.start();
        } catch (JMSException e) {
            // NOTE(review): unlike the create failure above, a start failure is
            // logged and swallowed after cleanup (no rethrow) — the service is
            // left with a null connection. Confirm this asymmetry is intentional.
            LOG.error("Unable to start connection", e);
            closeSafely(jmsConnection);
            jmsConnection = null;
        }
    }
}
public class IntegerArrayFilter { /** * { @ inheritDoc } */ @ Override public void insertString ( FilterBypass fb , int offset , String string , AttributeSet attr ) throws BadLocationException { } }
Document doc = fb . getDocument ( ) ; StringBuilder sb = new StringBuilder ( ) ; sb . append ( doc . getText ( 0 , doc . getLength ( ) ) ) ; sb . insert ( offset , string ) ; if ( validate ( sb . toString ( ) ) ) { super . insertString ( fb , offset , string , attr ) ; } else { onValidationError ( sb . toString ( ) ) ; }
public class XMLFilterImpl { /** * Filter a start element event . * @ param uri The element ' s Namespace URI , or the empty string . * @ param localName The element ' s local name , or the empty string . * @ param qName The element ' s qualified ( prefixed ) name , or the empty * string . * @ param atts The element ' s attributes . * @ exception org . xml . sax . SAXException The client may throw * an exception during processing . */ public void startElement ( String uri , String localName , String qName , Attributes atts ) throws SAXException { } }
if ( contentHandler != null ) { contentHandler . startElement ( uri , localName , qName , atts ) ; }
public class WMessageBoxExample { /** * applySettings is used to apply the setting to the various controls on the page . */ public void applySettings ( ) { } }
messageList . clear ( ) ; messageList . add ( "" ) ; for ( int i = 1 ; messageBox . getMessages ( ) . size ( ) >= i ; i ++ ) { messageList . add ( String . valueOf ( i ) ) ; } selRemove . setOptions ( messageList ) ; selRemove . resetData ( ) ; btnRemove . setDisabled ( messageList . isEmpty ( ) ) ; btnRemoveAll . setDisabled ( messageList . isEmpty ( ) ) ; messageBox . setType ( ( com . github . bordertech . wcomponents . WMessageBox . Type ) messageBoxTypeSelect . getSelected ( ) ) ; messageBox . setVisible ( cbVisible . isSelected ( ) ) ; if ( tfTitle . getText ( ) != null && ! "" . equals ( tfTitle . getText ( ) ) ) { messageBox . setTitleText ( tfTitle . getText ( ) ) ; } else { messageBox . setTitleText ( null ) ; }
public class ScriptContent {
    /**
     * Executes a script and returns the last value as a String.
     *
     * User-provided scripts are routed through the script-approval workflow:
     * unapproved ones run inside the Groovy sandbox with a restricted
     * whitelist, approved ones (and non-user-provided scripts) run directly.
     *
     * @param build        the build to act on
     * @param workspace    the build's workspace
     * @param listener     task listener whose logger is exposed to the script
     * @param scriptStream the script input stream
     * @return a String containing the toString of the last item in the script
     * @throws IOException if the script cannot be read
     */
    private String executeScript(Run<?, ?> build, FilePath workspace, TaskListener listener, InputStream scriptStream) throws IOException {
        String result = "";
        // Variables made visible to the Groovy script.
        Map binding = new HashMap<>();
        ExtendedEmailPublisherDescriptor descriptor = Jenkins.getActiveInstance().getDescriptorByType(ExtendedEmailPublisherDescriptor.class);
        Item parent = build.getParent();
        binding.put("build", build);
        binding.put("it", new ScriptContentBuildWrapper(build));
        binding.put("project", parent);
        binding.put("rooturl", descriptor.getHudsonUrl());
        binding.put("workspace", workspace);
        PrintStream logger = listener.getLogger();
        binding.put("logger", logger);
        String scriptContent = IOUtils.toString(scriptStream, descriptor.getCharset());
        // Register user-provided scripts with the approval queue so an admin can vet them.
        if (scriptStream instanceof UserProvidedContentInputStream) {
            ScriptApproval.get().configuring(scriptContent, GroovyLanguage.get(), ApprovalContext.create().withItem(parent));
        }
        if (scriptStream instanceof UserProvidedContentInputStream && !AbstractEvalContent.isApprovedScript(scriptContent, GroovyLanguage.get())) {
            // Unapproved script, run it in the sandbox
            GroovyShell shell = createEngine(descriptor, binding, true);
            Object res = GroovySandbox.run(shell, scriptContent, new ProxyWhitelist(Whitelist.all(), new PrintStreamInstanceWhitelist(logger), new EmailExtScriptTokenMacroWhitelist()));
            if (res != null) {
                result = res.toString();
            }
        } else {
            // Record usage of an already-approved user-provided script.
            if (scriptStream instanceof UserProvidedContentInputStream) {
                ScriptApproval.get().using(scriptContent, GroovyLanguage.get());
            }
            // Pre approved script, so run as is
            GroovyShell shell = createEngine(descriptor, binding, false);
            Script script = shell.parse(scriptContent);
            Object res = script.run();
            if (res != null) {
                result = res.toString();
            }
        }
        return result;
    }
}
public class XmlResponsesSaxParser { /** * Parses a ListAllMyBuckets response XML document from an input stream . * @ param inputStream * XML data input stream . * @ return the XML handler object populated with data parsed from the XML * stream . * @ throws SdkClientException */ public ListAllMyBucketsHandler parseListMyBucketsResponse ( InputStream inputStream ) throws IOException { } }
ListAllMyBucketsHandler handler = new ListAllMyBucketsHandler ( ) ; parseXmlInputStream ( handler , sanitizeXmlDocument ( handler , inputStream ) ) ; return handler ;
public class ContainerProducer { /** * Use @ BeforeClass event in order to scan the test class for annotation we might be interesting . * Event fired Before the Class execution . */ public void executeBeforeClass ( @ Observes BeforeClass event , TestClass testClass ) { } }
testClass = event . getTestClass ( ) ; Field [ ] fields = testClass . getJavaClass ( ) . getDeclaredFields ( ) ; for ( Field field : fields ) { if ( field . isAnnotationPresent ( GetDeployableContainer . class ) ) { if ( field . getType ( ) . isAssignableFrom ( ContainerManagerTool . class ) ) { isGetDeployableContainerAnnoPresent = true ; deployableContainerFields . add ( field ) ; } } }
public class CanalServerWithEmbedded { /** * 查询所有的订阅信息 */ public List < ClientIdentity > listAllSubscribe ( String destination ) throws CanalServerException { } }
CanalInstance canalInstance = canalInstances . get ( destination ) ; return canalInstance . getMetaManager ( ) . listAllSubscribeInfo ( destination ) ;
public class VisualContext {
    /**
     * Updates the context according to the given element style. The properties
     * that are not defined in the style are left unchanged.
     *
     * Note the ordering is significant: the font family/size are resolved
     * against the *parent* em before this context's own em is updated at the
     * end of the font section.
     *
     * @param style the style data
     */
    public void update(NodeData style) {
        // setup the font: weight and style only change when explicitly set
        CSSProperty.FontWeight weight = style.getProperty("font-weight");
        if (weight != null)
            fontWeight = weight;
        CSSProperty.FontStyle fstyle = style.getProperty("font-style");
        if (fstyle != null)
            fontStyle = fstyle;
        // Resolve the font family name.
        String family = null;
        CSSProperty.FontFamily ff = style.getProperty("font-family");
        if (ff == null) {
            family = font.getFamily(); // use current
        } else if (ff == FontFamily.list_values) {
            // An explicit family list: pick the first available one.
            TermList fmlspec = style.getValue(TermList.class, "font-family");
            if (fmlspec == null)
                family = font.getFamily();
            else
                family = getFontName(fmlspec, fontWeight, fontStyle);
        } else {
            // A generic family keyword (serif, sans-serif, ...).
            if (factory != null)
                family = factory.getConfig().getDefaultFont(ff.getAWTValue()); // try to translate to physical font
            if (family == null)
                family = ff.getAWTValue(); // could not translate - use as is
        }
        // Resolve the font size relative to the parent context's em.
        double size;
        double psize = (parent == null) ? CSSUnits.medium_font : parent.getEm();
        CSSProperty.FontSize fsize = style.getProperty("font-size");
        if (fsize == null)
            size = em;
        else if (fsize == CSSProperty.FontSize.length || fsize == CSSProperty.FontSize.percentage) {
            TermLengthOrPercent lenspec = style.getValue(TermLengthOrPercent.class, "font-size");
            if (lenspec != null) {
                em = psize;
                size = pxLength(lenspec, psize); // pixels are ok here (java is fixed to 72 dpi for font sizes)
            } else
                size = em;
        } else
            size = CSSUnits.convertFontSize(psize, fsize);
        fontSize = CSSUnits.points(size);
        // rem tracks the root context's em (fall back to our own when absent).
        if (rootContext != null)
            rem = rootContext.getEm();
        else
            rem = em; // we don't have a root context?
        font = createFont(family, (int) Math.round(size), fontWeight, fontStyle, letterSpacing);
        // Only now does this context's em take the newly computed size.
        em = size;
        CSSProperty.FontVariant variant = style.getProperty("font-variant");
        if (variant != null)
            fontVariant = variant;
        // text decoration: rebuilt from scratch on every update
        CSSProperty.TextDecoration decor = style.getProperty("text-decoration");
        textDecoration.clear();
        if (decor != null) {
            if (decor == TextDecoration.list_values) {
                TermList list = style.getValue(TermList.class, "text-decoration");
                for (Term<?> t : list) {
                    if (t.getValue() instanceof CSSProperty.TextDecoration)
                        textDecoration.add((CSSProperty.TextDecoration) t.getValue());
                }
            } else if (decor != TextDecoration.NONE)
                textDecoration.add(decor);
        }
        // letter spacing
        CSSProperty.LetterSpacing spacing = style.getProperty("letter-spacing");
        if (spacing != null) {
            if (spacing == CSSProperty.LetterSpacing.NORMAL)
                letterSpacing = 0.0;
            else {
                TermLength lenspec = style.getValue(TermLength.class, "letter-spacing");
                if (lenspec != null)
                    letterSpacing = pxLength(lenspec);
            }
        }
        // color
        TermColor clr = style.getSpecifiedValue(TermColor.class, "color");
        if (clr != null)
            color = CSSUnits.convertColor(clr.getValue());
    }
}
public class CSSStyleSheetListImpl { /** * Merges all StyleSheets in this list into one . * @ return the new ( merged ) StyleSheet */ public CSSStyleSheetImpl merge ( ) { } }
final CSSStyleSheetImpl merged = new CSSStyleSheetImpl ( ) ; final CSSRuleListImpl cssRuleList = new CSSRuleListImpl ( ) ; final Iterator < CSSStyleSheetImpl > it = getCSSStyleSheets ( ) . iterator ( ) ; while ( it . hasNext ( ) ) { final CSSStyleSheetImpl cssStyleSheet = it . next ( ) ; final CSSMediaRuleImpl cssMediaRule = new CSSMediaRuleImpl ( merged , null , cssStyleSheet . getMedia ( ) ) ; cssMediaRule . setRuleList ( cssStyleSheet . getCssRules ( ) ) ; cssRuleList . add ( cssMediaRule ) ; } merged . setCssRules ( cssRuleList ) ; merged . setMediaText ( "all" ) ; return merged ;
public class PdfPKCS7 { /** * Update the digest with the specified bytes . This method is used both for signing and verifying * @ param buf the data buffer * @ param off the offset in the data buffer * @ param len the data length * @ throws SignatureException on error */ public void update ( byte [ ] buf , int off , int len ) throws SignatureException { } }
if ( RSAdata != null || digestAttr != null ) messageDigest . update ( buf , off , len ) ; else sig . update ( buf , off , len ) ;
public class AbstractInjectionEngine {
    /**
     * Registers an override reference factory for the given annotation type.
     * Factories are kept per-annotation in an array; the backing map uses a
     * copy-on-write scheme so concurrent readers never see a partial update.
     *
     * F1339-9050
     */
    @Override
    public <A extends Annotation> void registerOverrideReferenceFactory(Class<A> annotation, OverrideReferenceFactory<A> factory) throws InjectionException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            Tr.debug(tc, "registerOverrideReferenceFactory", annotation, factory);
        if (annotation == null || factory == null) {
            throw new IllegalArgumentException("Null arguments are not allowed: " + annotation + ", " + factory);
        }
        // Now add the new factory to the map, per annotation.
        synchronized (this) {
            HashMap<Class<? extends Annotation>, OverrideReferenceFactory<?>[]> map = ivOverrideReferenceFactoryMap;
            if (ivOverrideReferenceFactoryMapCopyOnWrite) // PM79779 F743-32696
            {
                // Copy-on-write: deep-copy the map (arrays included) and mutate
                // the copy, so readers holding the old map are unaffected.
                HashMap<Class<? extends Annotation>, OverrideReferenceFactory<?>[]> newMap = new HashMap<Class<? extends Annotation>, OverrideReferenceFactory<?>[]>();
                for (Map.Entry<Class<? extends Annotation>, OverrideReferenceFactory<?>[]> entry : map.entrySet()) {
                    OverrideReferenceFactory<?>[] value = entry.getValue();
                    OverrideReferenceFactory<?>[] newValue = new OverrideReferenceFactory[value.length];
                    System.arraycopy(value, 0, newValue, 0, value.length);
                    newMap.put(entry.getKey(), newValue);
                }
                map = newMap;
            }
            OverrideReferenceFactory<?>[] factories = map.get(annotation);
            if (factories == null) {
                // First factory for this annotation: a processor must already
                // be registered for it, otherwise the factory could never run.
                if (!ivProcessorProviders.containsKey(annotation)) {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                        Tr.debug(tc, "registerOverrideReferenceFactory: An injection " + "processor does not exist for the specified annotation: " + annotation.getName());
                    throw new InjectionException("An injection processor does not exist for the specified annotation: " + annotation.getName());
                }
                factories = new OverrideReferenceFactory[1];
                factories[0] = factory;
                map.put(annotation, factories);
            } else {
                // Append the new factory to the existing array (grow-by-one copy).
                OverrideReferenceFactory<?>[] newFactories = new OverrideReferenceFactory[factories.length + 1];
                System.arraycopy(factories, 0, newFactories, 0, factories.length);
                newFactories[factories.length] = factory;
                map.put(annotation, newFactories);
            }
            if (ivOverrideReferenceFactoryMapCopyOnWrite) // PM79779 F743-32696
            {
                // Replace existing map after all updates have been made.
                ivOverrideReferenceFactoryMap = map;
                ivOverrideReferenceFactoryMapCopyOnWrite = false;
            }
        }
    }
}
public class PrimitiveTransformation { /** * < code > . google . privacy . dlp . v2 . TimePartConfig time _ part _ config = 8 ; < / code > */ public com . google . privacy . dlp . v2 . TimePartConfig getTimePartConfig ( ) { } }
if ( transformationCase_ == 8 ) { return ( com . google . privacy . dlp . v2 . TimePartConfig ) transformation_ ; } return com . google . privacy . dlp . v2 . TimePartConfig . getDefaultInstance ( ) ;
public class Syslog { /** * Use getInstance ( protocol ) as the starting point for Syslog4j . * @ param protocol - the Syslog protocol to use , e . g . " udp " , " tcp " , " unix _ syslog " , " unix _ socket " , or a custom protocol * @ return Returns an instance of SyslogIF . * @ throws SyslogRuntimeException */ public static final SyslogIF getInstance ( String protocol ) throws SyslogRuntimeException { } }
String _protocol = protocol . toLowerCase ( ) ; if ( instances . containsKey ( _protocol ) ) { return ( SyslogIF ) instances . get ( _protocol ) ; } else { StringBuffer message = new StringBuffer ( "Syslog protocol \"" + protocol + "\" not defined; call Syslogger.createSyslogInstance(protocol,config) first" ) ; if ( instances . size ( ) > 0 ) { message . append ( " or use one of the following instances: " ) ; Iterator i = instances . keySet ( ) . iterator ( ) ; while ( i . hasNext ( ) ) { String k = ( String ) i . next ( ) ; message . append ( k ) ; if ( i . hasNext ( ) ) { message . append ( ' ' ) ; } } } throwRuntimeException ( message . toString ( ) ) ; return null ; }
public class GuiStandardUtils { /** * Make a vertical row of buttons of equal size , whch are equally spaced , * and aligned on the right . * The returned component has border spacing only on the left ( of the size * recommended by the Look and Feel Design Guidelines ) . All other spacing * must be applied elsewhere ; usually , this will only mean that the * dialog ' s top - level panel should use { @ link # buildStandardBorder } . * @ param buttons * contains < code > JButton < / code > objects . * @ return A column displaying the buttons vertically . */ public static JComponent createCommandButtonColumn ( JButton [ ] buttons ) { } }
ButtonStackBuilder builder = new ButtonStackBuilder ( ) ; for ( int i = 0 ; i < buttons . length ; i ++ ) { if ( i > 0 ) { builder . addRelatedGap ( ) ; } builder . addGridded ( buttons [ i ] ) ; } return builder . getPanel ( ) ;
public class DirectoryHelper { /** * Rename file . Trying to remove destination first . * If file can ' t be renamed in standard way the coping * data will be used instead . * @ param srcFile * source file * @ param dstFile * destination file * @ throws IOException * if any exception occurred */ public static void deleteDstAndRename ( File srcFile , File dstFile ) throws IOException { } }
if ( dstFile . exists ( ) ) { if ( ! dstFile . delete ( ) ) { throw new IOException ( "Cannot delete " + dstFile ) ; } } renameFile ( srcFile , dstFile ) ;
public class DocumentFile { /** * This method takes a string and expands the inclusion statements found in it . * It returns the fully expanded string . An inclusion is a line that contains * ! code < filename > * @ param content Initial content * @ return Expanded content by inserting the included files * @ throws Exception */ private String expandInclusion ( String content ) throws Exception { } }
// Quick check { Matcher matcher = patternInclusion . matcher ( content ) ; if ( false == matcher . find ( ) ) { return content ; } } // OK , perform processing StringWriter output = new StringWriter ( ) ; StringReader sr = new StringReader ( content ) ; BufferedReader br = new BufferedReader ( sr ) ; String line = br . readLine ( ) ; while ( null != line ) { Matcher matcher = patternInclusion . matcher ( line ) ; if ( matcher . find ( ) ) { output . write ( line ) ; String filePath = matcher . group ( 1 ) ; FSEntry includedEntry = FSEntrySupport . findDescendant ( includeDir , filePath ) ; if ( null != includedEntry ) { output . write ( "\n" ) ; insertFile ( output , includedEntry ) ; } else { output . write ( " - not found\n" ) ; } } else { output . write ( line ) ; output . write ( "\n" ) ; } line = br . readLine ( ) ; } return output . toString ( ) ;
public class MessageProcessorMatching { /** * Concatenates topicspace and a topic expression with a level separator between . * Null topics are treated as wildcarded topics at the root level so the * combination topic becomes topicSpace / / . * Empty topics are treated as a subscription to the root level so becomes * topicSpace / * Topics that begin / / need to be treated as topicSpace / / * Topics that begin / are rejected * @ param topicSpace The topicspace name * @ param discriminator The topic * @ exception SIDiscriminatorSyntaxException If the topic starts with a leading / */ public String buildAddTopicExpression ( String destName , String discriminator ) throws SIDiscriminatorSyntaxException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "buildAddTopicExpression" , new Object [ ] { destName , discriminator } ) ; String combo = null ; if ( discriminator == null ) combo = destName + MatchSpace . SUBTOPIC_DOUBLE_SEPARATOR_STOP_STRING ; else if ( discriminator . trim ( ) . length ( ) == 0 ) combo = destName ; else if ( discriminator . startsWith ( MatchSpace . SUBTOPIC_DOUBLE_SEPARATOR_STRING ) ) combo = destName + discriminator ; else if ( discriminator . startsWith ( "" + MatchSpace . SUBTOPIC_SEPARATOR_CHAR ) ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "buildAddTopicExpression" , "SISelectorSyntaxException" ) ; throw new SIDiscriminatorSyntaxException ( nls . getFormattedMessage ( "INVALID_TOPIC_ERROR_CWSIP0372" , new Object [ ] { discriminator } , null ) ) ; } else combo = destName + MatchSpace . SUBTOPIC_SEPARATOR_CHAR + discriminator ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "buildAddTopicExpression" , combo ) ; return combo ;
public class PredictionsImpl { /** * Gets predictions for a given utterance , in the form of intents and entities . The current maximum query size is 500 characters . * @ param appId The LUIS application ID ( Guid ) . * @ param query The utterance to predict . * @ param resolveOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the LuisResult object */ public Observable < LuisResult > resolveAsync ( String appId , String query , ResolveOptionalParameter resolveOptionalParameter ) { } }
return resolveWithServiceResponseAsync ( appId , query , resolveOptionalParameter ) . map ( new Func1 < ServiceResponse < LuisResult > , LuisResult > ( ) { @ Override public LuisResult call ( ServiceResponse < LuisResult > response ) { return response . body ( ) ; } } ) ;
public class EntryStream { /** * Returns a { @ link Map } where elements of this stream with the same key are * grouped together . The resulting { @ code Map } keys are the keys of this * stream entries and the corresponding values are combined using the * provided downstream collector . * There are no guarantees on the type , mutability , serializability , or * thread - safety of the { @ code Map } object returned . If more control over * the returned { @ code Map } is required , use * { @ link # grouping ( Supplier , Collector ) } . * This is a < a href = " package - summary . html # StreamOps " > terminal < / a > * operation . * @ param < A > the intermediate accumulation type of the downstream collector * @ param < D > the result type of the downstream reduction * @ param downstream a { @ code Collector } implementing the downstream * reduction * @ return a { @ code Map } containing the elements of this stream * @ see Collectors # groupingBy ( Function , Collector ) */ public < A , D > Map < K , D > grouping ( Collector < ? super V , A , D > downstream ) { } }
Function < Entry < K , V > , K > keyMapper = Entry :: getKey ; Collector < Entry < K , V > , ? , D > mapping = Collectors . mapping ( Entry :: getValue , downstream ) ; if ( isParallel ( ) && downstream . characteristics ( ) . contains ( Characteristics . UNORDERED ) ) { return collect ( Collectors . groupingByConcurrent ( keyMapper , mapping ) ) ; } return collect ( Collectors . groupingBy ( keyMapper , mapping ) ) ;
public class ByteArrayWriter { /** * Write a String to the byte array converting the bytes using the given * character set . * @ param str * @ param charset * @ throws IOException */ public void writeString ( String str , String charset ) throws IOException { } }
if ( str == null ) { writeInt ( 0 ) ; } else { byte [ ] tmp ; if ( ByteArrayReader . encode ) tmp = str . getBytes ( charset ) ; else tmp = str . getBytes ( ) ; writeInt ( tmp . length ) ; write ( tmp ) ; }
public class MiscUtil { /** * Removes any leading new lines from the string . * A newline is one of & quot ; < code > \ n < / code > & quot ; , * & quot ; < code > \ r < / code > & quot ; , or & quot ; < code > \ r \ n < / code > & quot ; . < / p > * @ param string * @ return */ static public String removeLeadingNewLines ( String string ) { } }
if ( string == null ) return string ; int pos = 0 ; int len = string . length ( ) ; boolean done = false ; while ( ! done ) { char c = string . charAt ( pos ) ; if ( c == '\n' || c == '\r' ) { pos ++ ; } else { done = true ; } if ( pos == len ) { done = true ; } } String result = string . substring ( pos ) ; logger . debug ( "removed " + pos + " new line characters" ) ; return result ;
public class OAuth { /** * Finish the oauth flow after the user was redirected back . * @ param code * Code returned by the Eve Online SSO * @ param state * This should be some secret to prevent XRSF see * getAuthorizationUri * @ throws net . troja . eve . esi . ApiException */ public void finishFlow ( final String code , final String state ) throws ApiException { } }
if ( account == null ) throw new IllegalArgumentException ( "Auth is not set" ) ; if ( codeVerifier == null ) throw new IllegalArgumentException ( "code_verifier is not set" ) ; if ( account . getClientId ( ) == null ) throw new IllegalArgumentException ( "client_id is not set" ) ; StringBuilder builder = new StringBuilder ( ) ; builder . append ( "grant_type=" ) ; builder . append ( encode ( "authorization_code" ) ) ; builder . append ( "&client_id=" ) ; builder . append ( encode ( account . getClientId ( ) ) ) ; builder . append ( "&code=" ) ; builder . append ( encode ( code ) ) ; builder . append ( "&code_verifier=" ) ; builder . append ( encode ( codeVerifier ) ) ; update ( account , builder . toString ( ) ) ;
public class Graph {
    /**
     * Apply a function to the attribute of each vertex in the graph.
     *
     * The new vertex value type is determined from the mapper: either it
     * reports its own produced type (ResultTypeQueryable), or it is extracted
     * from the MapFunction's generic signature.
     *
     * @param mapper the map function to apply.
     * @return a new graph
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public <NV> Graph<K, NV, EV> mapVertices(final MapFunction<Vertex<K, VV>, NV> mapper) {
        // The vertex key type is field 0 of the Vertex tuple type.
        TypeInformation<K> keyType = ((TupleTypeInfo<?>) vertices.getType()).getTypeAt(0);
        TypeInformation<NV> valueType;
        if (mapper instanceof ResultTypeQueryable) {
            valueType = ((ResultTypeQueryable) mapper).getProducedType();
        } else {
            // Extract output type index 1 (the NV parameter) of the MapFunction.
            valueType = TypeExtractor.createTypeInfo(MapFunction.class, mapper.getClass(), 1, vertices.getType(), null);
        }
        // Assemble the full Vertex<K, NV> tuple type for the result data set.
        TypeInformation<Vertex<K, NV>> returnType = (TypeInformation<Vertex<K, NV>>) new TupleTypeInfo(Vertex.class, keyType, valueType);
        return mapVertices(mapper, returnType);
    }
}
public class XsdEmitter { /** * Create an XML schema enumeration facet . * @ param conditionValue the value to set * @ return an XML schema enumeration facet */ protected XmlSchemaEnumerationFacet createEnumerationFacet ( final String conditionValue ) { } }
XmlSchemaEnumerationFacet xmlSchemaEnumerationFacet = new XmlSchemaEnumerationFacet ( ) ; xmlSchemaEnumerationFacet . setValue ( conditionValue ) ; return xmlSchemaEnumerationFacet ;
public class Transaction { /** * Executes the specified event as part of this transaction . * @ param event the event to execute in this transaction context * @ param args the values to be used by the event * @ return results from the transaction , can be null * @ throws org . dasein . persist . PersistenceException an error occurred * interacting with the database * @ deprecated use { @ link # execute ( Class , Map ) } */ public HashMap < String , Object > execute ( Execution event , Map < String , Object > args ) throws PersistenceException { } }
Map < String , Object > r = execute ( event . getClass ( ) , args ) ; if ( r == null ) { return null ; } if ( r instanceof HashMap ) { return ( HashMap < String , Object > ) r ; } else { HashMap < String , Object > tmp = new HashMap < String , Object > ( ) ; tmp . putAll ( r ) ; return tmp ; }
public class ElasticsearchClientV5 { /** * Wait for an index to become at least yellow ( all primaries assigned ) * @ param index index name * @ throws IOException In case of error */ public void waitForHealthyIndex ( String index ) throws IOException { } }
logger . debug ( "wait for yellow health on index [{}]" , index ) ; Response restResponse = lowLevelClient . performRequest ( "GET" , "/_cluster/health/" + index , Collections . singletonMap ( "wait_for_status" , "yellow" ) ) ; logger . trace ( "health response: {}" , asMap ( restResponse ) ) ;
public class GeoPackageExtensions { /** * Delete the WKT for Coordinate Reference Systems extension * @ param geoPackage * GeoPackage * @ since 3.2.0 */ public static void deleteCrsWktExtension ( GeoPackageCore geoPackage ) { } }
CrsWktExtension crsWktExtension = new CrsWktExtension ( geoPackage ) ; if ( crsWktExtension . has ( ) ) { crsWktExtension . removeExtension ( ) ; }
public class EventNotificationManager { /** * Removes a subscription of an { @ link EventListener } object for the given event type . * @ param eventListener * the event listener to remove the subscription for * @ param eventType * the event type to remove the subscription for */ public void unsubscribeFromEvent ( final EventListener eventListener , final Class < ? extends AbstractEvent > eventType ) { } }
synchronized ( this . subscriptions ) { List < EventListener > subscribers = this . subscriptions . get ( eventType ) ; if ( subscribers == null ) { return ; } subscribers . remove ( eventListener ) ; if ( subscribers . isEmpty ( ) ) { this . subscriptions . remove ( eventType ) ; } }
public class DO { /** * < div color = ' red ' style = " font - size : 24px ; color : red " > < b > < i > < u > JCYPHER < / u > < / i > < / b > < / div > * < div color = ' red ' style = " font - size : 18px ; color : red " > < i > set a property of a node or relation in a DO clause < / i > < / div > * < div color = ' red ' style = " font - size : 18px ; color : red " > < i > e . g . . . . < b > SET ( n . property ( " age " ) ) < / b > . to ( 20 ) . . . < / i > < / div > * < br / > */ public static Set < ModifyTerminal > SET ( JcProperty property ) { } }
Set < ModifyTerminal > ret = ModifyFactory . setPropertyInDO ( property ) ; ASTNode an = APIObjectAccess . getAstNode ( ret ) ; an . setClauseType ( ClauseType . SET ) ; return ret ;