signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class AccessibilityNodeInfoUtils { /** * Gets the text of a < code > node < / code > by returning the content description * ( if available ) or by returning the text . * @ param node The node . * @ return The node text . */ public static CharSequence getNodeText ( AccessibilityNodeInfoCompat node ) { } }
if ( node == null ) { return null ; } // Prefer content description over text . // TODO : Why are we checking the trimmed length ? final CharSequence contentDescription = node . getContentDescription ( ) ; if ( ! TextUtils . isEmpty ( contentDescription ) && ( TextUtils . getTrimmedLength ( contentDescription ) > 0 ) ) { return contentDescription ; } final CharSequence text = node . getText ( ) ; if ( ! TextUtils . isEmpty ( text ) && ( TextUtils . getTrimmedLength ( text ) > 0 ) ) { return text ; } return null ;
public class CRFClassifier { /** * Makes a CRFDatum by producing features and a label from input data at a * specific position , using the provided factory . * @ param info * The input data * @ param loc * The position to build a datum at * @ param featureFactory * The FeatureFactory to use to extract features * @ return The constructed CRFDatum */ public CRFDatum < List < String > , CRFLabel > makeDatum ( List < IN > info , int loc , edu . stanford . nlp . sequences . FeatureFactory < IN > featureFactory ) { } }
pad . set ( AnswerAnnotation . class , flags . backgroundSymbol ) ; PaddedList < IN > pInfo = new PaddedList < IN > ( info , pad ) ; ArrayList < List < String > > features = new ArrayList < List < String > > ( ) ; // for ( int i = 0 ; i < windowSize ; i + + ) { // List featuresC = new ArrayList ( ) ; // for ( int j = 0 ; j < FeatureFactory . win [ i ] . length ; j + + ) { // featuresC . addAll ( featureFactory . features ( info , loc , // FeatureFactory . win [ i ] [ j ] ) ) ; // features . add ( featuresC ) ; Collection < Clique > done = new HashSet < Clique > ( ) ; for ( int i = 0 ; i < windowSize ; i ++ ) { List < String > featuresC = new ArrayList < String > ( ) ; List < Clique > windowCliques = FeatureFactory . getCliques ( i , 0 ) ; windowCliques . removeAll ( done ) ; done . addAll ( windowCliques ) ; for ( Clique c : windowCliques ) { featuresC . addAll ( featureFactory . getCliqueFeatures ( pInfo , loc , c ) ) ; // todo useless copy because of typing reasons } features . add ( featuresC ) ; } int [ ] labels = new int [ windowSize ] ; for ( int i = 0 ; i < windowSize ; i ++ ) { String answer = pInfo . get ( loc + i - windowSize + 1 ) . get ( AnswerAnnotation . class ) ; labels [ i ] = classIndex . indexOf ( answer ) ; } printFeatureLists ( pInfo . get ( loc ) , features ) ; CRFDatum < List < String > , CRFLabel > d = new CRFDatum < List < String > , CRFLabel > ( features , new CRFLabel ( labels ) ) ; // System . err . println ( d ) ; return d ;
public class DraweeHolder { /** * Sets a new controller . */ public void setController ( @ Nullable DraweeController draweeController ) { } }
boolean wasAttached = mIsControllerAttached ; if ( wasAttached ) { detachController ( ) ; } // Clear the old controller if ( isControllerValid ( ) ) { mEventTracker . recordEvent ( Event . ON_CLEAR_OLD_CONTROLLER ) ; mController . setHierarchy ( null ) ; } mController = draweeController ; if ( mController != null ) { mEventTracker . recordEvent ( Event . ON_SET_CONTROLLER ) ; mController . setHierarchy ( mHierarchy ) ; } else { mEventTracker . recordEvent ( Event . ON_CLEAR_CONTROLLER ) ; } if ( wasAttached ) { attachController ( ) ; }
public class HeightSpec { /** * Get the effective height based on the passed available height . This may not * be called for star or auto height elements . * @ param fAvailableHeight * The available height . * @ return The effective height to use . */ @ Nonnegative public float getEffectiveValue ( final float fAvailableHeight ) { } }
switch ( m_eType ) { case ABSOLUTE : return Math . min ( m_fValue , fAvailableHeight ) ; case PERCENTAGE : return fAvailableHeight * m_fValue / 100 ; default : throw new IllegalStateException ( "Unsupported: " + m_eType + " - must be calculated outside!" ) ; }
public class DefaultGrailsControllerClass { /** * Invokes the controller action for the given name on the given controller instance * @ param controller The controller instance * @ param action The action name * @ return The result of the action * @ throws Throwable */ @ Override public Object invoke ( Object controller , String action ) throws Throwable { } }
if ( action == null ) action = this . defaultActionName ; ActionInvoker handle = actions . get ( action ) ; if ( handle == null ) throw new IllegalArgumentException ( "Invalid action name: " + action ) ; return handle . invoke ( controller ) ;
public class RebalanceController { /** * Executes the rebalance plan . Does so batch - by - batch . Between each batch , * status is dumped to logger . info . * @ param rebalancePlan */ private void executePlan ( RebalancePlan rebalancePlan ) { } }
logger . info ( "Starting to execute rebalance Plan!" ) ; int batchCount = 0 ; int partitionStoreCount = 0 ; long totalTimeMs = 0 ; List < RebalanceBatchPlan > entirePlan = rebalancePlan . getPlan ( ) ; int numBatches = entirePlan . size ( ) ; int numPartitionStores = rebalancePlan . getPartitionStoresMoved ( ) ; for ( RebalanceBatchPlan batchPlan : entirePlan ) { logger . info ( "======== REBALANCING BATCH " + ( batchCount + 1 ) + " ========" ) ; RebalanceUtils . printBatchLog ( batchCount , logger , batchPlan . toString ( ) ) ; long startTimeMs = System . currentTimeMillis ( ) ; // ACTUALLY DO A BATCH OF REBALANCING ! executeBatch ( batchCount , batchPlan ) ; totalTimeMs += ( System . currentTimeMillis ( ) - startTimeMs ) ; // Bump up the statistics batchCount ++ ; partitionStoreCount += batchPlan . getPartitionStoreMoves ( ) ; batchStatusLog ( batchCount , numBatches , partitionStoreCount , numPartitionStores , totalTimeMs ) ; }
public class BNFHeadersImpl { /** * This method is used to skip leading CRLF characters . It will stop when * it finds a non - CRLF character , runs out of data , or finds too many CRLFs * @ param buffer * @ return TokenCodes - - MOREDATA means it ran out of buffer information , * DELIM means it found a non - CRLF character , and CRLF means it found * too many CRLFs */ protected TokenCodes skipCRLFs ( WsByteBuffer buffer ) { } }
int maxCRLFs = 33 ; // limit is the max number of CRLFs to skip if ( this . bytePosition >= this . byteLimit ) { if ( ! fillByteCache ( buffer ) ) { // no more data return TokenCodes . TOKEN_RC_MOREDATA ; } } byte b = this . byteCache [ this . bytePosition ++ ] ; for ( int i = 0 ; i < maxCRLFs ; i ++ ) { if ( - 1 == b ) { // ran out of data return TokenCodes . TOKEN_RC_MOREDATA ; } if ( BNFHeaders . CR != b && BNFHeaders . LF != b ) { // stopped on non - CRLF character , reset position this . bytePosition -- ; return TokenCodes . TOKEN_RC_DELIM ; } // keep going otherwise if ( this . bytePosition >= this . byteLimit ) { return TokenCodes . TOKEN_RC_MOREDATA ; } b = this . byteCache [ this . bytePosition ++ ] ; } // found too many CRLFs . . . invalid if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Too many leading CRLFs found" ) ; } return TokenCodes . TOKEN_RC_CRLF ;
public class ServerChannelUpdater { /** * Removes a permission overwrite for the given entity . * @ param < T > The type of entity to hold the permission , usually < code > User < / code > or < code > Role < / code > * @ param permissionable The entity which permission overwrite should be removed . * @ return The current instance in order to chain call methods . */ public < T extends Permissionable & DiscordEntity > ServerChannelUpdater removePermissionOverwrite ( T permissionable ) { } }
delegate . removePermissionOverwrite ( permissionable ) ; return this ;
public class ComputerVisionImpl { /** * This operation extracts a rich set of visual features based on the image content . * @ param image An image stream . * @ param visualFeatures A string indicating what visual feature types to return . Multiple values should be comma - separated . Valid visual feature types include : Categories - categorizes image content according to a taxonomy defined in documentation . Tags - tags the image with a detailed list of words related to the image content . Description - describes the image content with a complete English sentence . Faces - detects if faces are present . If present , generate coordinates , gender and age . ImageType - detects if image is clipart or a line drawing . Color - determines the accent color , dominant color , and whether an image is black & amp ; white . Adult - detects if the image is pornographic in nature ( depicts nudity or a sex act ) . Sexually suggestive content is also detected . * @ param details A string indicating which domain - specific details to return . Multiple values should be comma - separated . Valid visual feature types include : Celebrities - identifies celebrities if detected in the image . Possible values include : ' Celebrities ' , ' Landmarks ' * @ param language The desired language for output generation . If this parameter is not specified , the default value is & amp ; quot ; en & amp ; quot ; . Supported languages : en - English , Default . es - Spanish , ja - Japanese , pt - Portuguese , zh - Simplified Chinese . Possible values include : ' en ' , ' es ' , ' ja ' , ' pt ' , ' zh ' * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the ImageAnalysis object */ public Observable < ServiceResponse < ImageAnalysis > > analyzeImageInStreamWithServiceResponseAsync ( byte [ ] image , List < VisualFeatureTypes > visualFeatures , String details , String language ) { } }
if ( this . client . endpoint ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.endpoint() is required and cannot be null." ) ; } if ( image == null ) { throw new IllegalArgumentException ( "Parameter image is required and cannot be null." ) ; } Validator . validate ( visualFeatures ) ; String parameterizedHost = Joiner . on ( ", " ) . join ( "{Endpoint}" , this . client . endpoint ( ) ) ; String visualFeaturesConverted = this . client . serializerAdapter ( ) . serializeList ( visualFeatures , CollectionFormat . CSV ) ; RequestBody imageConverted = RequestBody . create ( MediaType . parse ( "application/octet-stream" ) , image ) ; return service . analyzeImageInStream ( visualFeaturesConverted , details , language , imageConverted , this . client . acceptLanguage ( ) , parameterizedHost , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < ImageAnalysis > > > ( ) { @ Override public Observable < ServiceResponse < ImageAnalysis > > call ( Response < ResponseBody > response ) { try { ServiceResponse < ImageAnalysis > clientResponse = analyzeImageInStreamDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
public class JvmTypesBuilder { /** * / * @ Nullable */ @ Deprecated public JvmAnnotationReference toAnnotation ( /* @ Nullable */ EObject sourceElement , /* @ Nullable */ Class < ? > annotationType ) { } }
return toAnnotation ( sourceElement , annotationType , null ) ;
public class NodeCache { /** * NOTE : this is a BLOCKING method . Completely rebuild the internal cache by querying * for all needed data WITHOUT generating any events to send to listeners . * @ throws Exception errors */ public void rebuild ( ) throws Exception { } }
Preconditions . checkState ( state . get ( ) == State . STARTED , "Not started" ) ; internalRebuild ( ) ; reset ( ) ;
public class NetworkBuffer { /** * Creates a new configuration for the network buffer identified by the installation * ID . * The configuration is added to the network buffer specified by the installation ID . * If a network buffer with the supplied installation ID does not exist , it will be * created . If < code > null < / code > or an empty string is supplied for the installation * ID , a new default ID is generated ( see { @ link NetworkBuffer # createBuffer ( String ) } . * < br > * If the supplied < code > link < / code > gets closed , the created configuration will get * deactivated ( see { @ link Configuration # activate ( boolean ) } ) , and the buffered link * of the configuration , obtained with { @ link Configuration # getBufferedLink ( ) } , will * get closed as well . * @ param link KNX network link communicating with the KNX network * @ param installationID installation identifier for the network buffer , or * < code > null < / code > * @ return the new configuration */ public static Configuration createConfiguration ( KNXNetworkLink link , String installationID ) { } }
NetworkBuffer b = getBuffer ( installationID ) ; if ( b == null ) b = createBuffer ( installationID ) ; return b . createConfiguration ( link ) ;
public class HttpSupport { /** * Parses name from hash syntax . * @ param param something like this : < code > person [ account ] < / code > * @ return name of hash key : < code > account < / code > */ private static String parseHashName ( String param ) { } }
Matcher matcher = hashPattern . matcher ( param ) ; String name = null ; while ( matcher . find ( ) ) { name = matcher . group ( 0 ) ; } return name == null ? null : name . substring ( 1 , name . length ( ) - 1 ) ;
public class CRLExtensions { /** * Get the extension with this alias . * @ param alias the identifier string for the extension to retrieve . */ public Extension get ( String alias ) { } }
X509AttributeName attr = new X509AttributeName ( alias ) ; String name ; String id = attr . getPrefix ( ) ; if ( id . equalsIgnoreCase ( X509CertImpl . NAME ) ) { // fully qualified int index = alias . lastIndexOf ( "." ) ; name = alias . substring ( index + 1 ) ; } else name = alias ; return map . get ( name ) ;
public class UrlSyntaxProviderImpl { /** * Determine the { @ link UrlState } to use for the targeted portlet window */ protected UrlState determineUrlState ( final IPortletWindow portletWindow , final IPortletUrlBuilder targetedPortletUrlBuilder ) { } }
final WindowState requestedWindowState ; if ( targetedPortletUrlBuilder == null ) { requestedWindowState = null ; } else { requestedWindowState = targetedPortletUrlBuilder . getWindowState ( ) ; } return determineUrlState ( portletWindow , requestedWindowState ) ;
public class VirtualNetworkGatewaysInner { /** * Creates or updates a virtual network gateway in the specified resource group . * @ param resourceGroupName The name of the resource group . * @ param virtualNetworkGatewayName The name of the virtual network gateway . * @ param parameters Parameters supplied to create or update virtual network gateway operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the VirtualNetworkGatewayInner object */ public Observable < VirtualNetworkGatewayInner > beginCreateOrUpdateAsync ( String resourceGroupName , String virtualNetworkGatewayName , VirtualNetworkGatewayInner parameters ) { } }
return beginCreateOrUpdateWithServiceResponseAsync ( resourceGroupName , virtualNetworkGatewayName , parameters ) . map ( new Func1 < ServiceResponse < VirtualNetworkGatewayInner > , VirtualNetworkGatewayInner > ( ) { @ Override public VirtualNetworkGatewayInner call ( ServiceResponse < VirtualNetworkGatewayInner > response ) { return response . body ( ) ; } } ) ;
public class WsHandshakeValidator { /** * Allow subclasses to register themselves as validators for specific versions of * the wire protocol . * @ param wireProtocolVersion the version of the protocol * @ param validator the validator */ private void register ( WebSocketWireProtocol wireProtocolVersion , WsHandshakeValidator validator ) { } }
if ( wireProtocolVersion == null ) { throw new NullPointerException ( "wireProtocolVersion" ) ; } if ( validator == null ) { throw new NullPointerException ( "validator" ) ; } WsHandshakeValidator existingValidator = handshakeValidatorsByWireProtocolVersion . put ( wireProtocolVersion , validator ) ; logger . trace ( "Class " + validator . getClass ( ) . getName ( ) + " registered to support websocket handshake for protocol " + wireProtocolVersion ) ; if ( existingValidator != null ) { logger . trace ( "Multiple handshake validators have registered to support wire protocol " + wireProtocolVersion + ". Using class " + this . getClass ( ) . getName ( ) + '.' ) ; }
public class VarExporter { /** * Write all variables , one per line , to the given writer , in the format " name = value " . * Will escape values for compatibility with loading into { @ link java . util . Properties } . * @ param out writer * @ param includeDoc true if documentation comments should be included */ public void dump ( final PrintWriter out , final boolean includeDoc ) { } }
visitVariables ( new Visitor ( ) { public void visit ( Variable var ) { var . write ( out , includeDoc ) ; } } ) ;
public class AbstractInstallPlanJob { /** * Install provided extension . * @ param targetDependency used to search the extension to install in remote repositories * @ param dependency indicate if the extension is installed as a dependency * @ param namespace the namespace where to install the extension * @ param managedDependencies the managed dependencies * @ param parents the parents extensions ( which triggered this extension install ) * @ return the install plan node for the provided extension * @ throws InstallException error when trying to install provided extension */ private ModifableExtensionPlanNode installExtensionDependency ( ExtensionDependency targetDependency , boolean dependency , String namespace , Map < String , ExtensionDependency > managedDependencies , Set < String > parents ) throws InstallException { } }
this . progressManager . pushLevelProgress ( 2 , this ) ; try { this . progressManager . startStep ( this ) ; // Check if the extension is already in local repository Extension extension = resolveExtension ( targetDependency ) ; // Rewrite the extension Extension rewrittenExtension ; if ( getRequest ( ) . getRewriter ( ) != null ) { rewrittenExtension = getRequest ( ) . getRewriter ( ) . rewrite ( extension ) ; } else { rewrittenExtension = extension ; } this . progressManager . endStep ( this ) ; this . progressManager . startStep ( this ) ; try { return installExtension ( extension , rewrittenExtension , dependency , namespace , targetDependency , managedDependencies , parents ) ; } catch ( Exception e ) { throw new InstallException ( String . format ( "Failed to create an install plan for extension dependency [%s]" , targetDependency ) , e ) ; } } finally { this . progressManager . popLevelProgress ( this ) ; }
public class SwaptionSingleCurve { /** * This method returns the value random variable of the product within the specified model , evaluated at a given evalutationTime . * Note : For a lattice this is often the value conditional to evalutationTime , for a Monte - Carlo simulation this is the ( sum of ) value discounted to evaluation time . * Cashflows prior evaluationTime are not considered . * @ param evaluationTime The time on which this products value should be observed . * @ param model The model used to price the product . * @ return The random variable representing the value of the product discounted to evaluation time * @ throws net . finmath . exception . CalculationException Thrown if the valuation fails , specific cause may be available via the < code > cause ( ) < / code > method . */ @ Override public RandomVariable getValue ( double evaluationTime , LIBORModelMonteCarloSimulationModel model ) throws CalculationException { } }
/* * Calculate value of the swap at exercise date on each path ( beware of perfect foresight - all rates are simulationTime = exerciseDate ) */ RandomVariable valueOfSwapAtExerciseDate = model . getRandomVariableForConstant ( /* fixingDates [ fixingDates . length - 1 ] , */ 0.0 ) ; // Calculate the value of the swap by working backward through all periods for ( int period = fixingDates . length - 1 ; period >= 0 ; period -- ) { double fixingDate = fixingDates [ period ] ; double paymentDate = paymentDates [ period ] ; double swaprate = swaprates [ period ] ; double periodLength = periodLengths != null ? periodLengths [ period ] : paymentDate - fixingDate ; // Get random variables - note that this is the rate at simulation time = exerciseDate RandomVariable libor = model . getLIBOR ( exerciseDate , fixingDate , paymentDate ) ; // Add payment received at end of period RandomVariable payoff = libor . sub ( swaprate ) . mult ( periodLength ) ; valueOfSwapAtExerciseDate = valueOfSwapAtExerciseDate . add ( payoff ) ; // Discount back to beginning of period valueOfSwapAtExerciseDate = valueOfSwapAtExerciseDate . discount ( libor , paymentDate - fixingDate ) ; } // If the exercise date is not the first periods start date , then discount back to the exercise date ( calculate the forward starting swap ) if ( fixingDates [ 0 ] != exerciseDate ) { RandomVariable libor = model . getLIBOR ( exerciseDate , exerciseDate , fixingDates [ 0 ] ) ; double periodLength = fixingDates [ 0 ] - exerciseDate ; // Discount back to beginning of period valueOfSwapAtExerciseDate = valueOfSwapAtExerciseDate . discount ( libor , periodLength ) ; } /* * Calculate swaption value */ RandomVariable values = valueOfSwapAtExerciseDate . floor ( 0.0 ) ; RandomVariable numeraire = model . getNumeraire ( exerciseDate ) ; RandomVariable monteCarloProbabilities = model . getMonteCarloWeights ( model . getTimeIndex ( exerciseDate ) ) ; values = values . div ( numeraire ) . 
mult ( monteCarloProbabilities ) ; RandomVariable numeraireAtZero = model . getNumeraire ( evaluationTime ) ; RandomVariable monteCarloProbabilitiesAtZero = model . getMonteCarloWeights ( evaluationTime ) ; values = values . mult ( numeraireAtZero ) . div ( monteCarloProbabilitiesAtZero ) ; return values ;
public class StatusConsoleListener { /** * Writes status messages to the console . * @ param data The StatusData . */ @ Override public void log ( final StatusData data ) { } }
if ( ! filtered ( data ) ) { stream . println ( data . getFormattedStatus ( ) ) ; }
public class QueryChemObject { /** * This should be triggered by an method that changes the content of an object * to that the registered listeners can react to it . This is a version of * notifyChanged ( ) which allows to propagate a change event while preserving * the original origin . * @ param evt A ChemObjectChangeEvent pointing to the source of where * the change happend */ @ Override public void notifyChanged ( IChemObjectChangeEvent evt ) { } }
if ( getNotification ( ) && getListenerCount ( ) > 0 ) { List < IChemObjectListener > listeners = lazyChemObjectListeners ( ) ; for ( Object listener : listeners ) { ( ( IChemObjectListener ) listener ) . stateChanged ( evt ) ; } }
public class SwapAnnuity { /** * Function to calculate an ( idealized ) swap annuity for a given schedule and discount curve . * Note that , the value returned is divided by the discount factor at evaluation . * This matters , if the discount factor at evaluationTime is not equal to 1.0. * @ param evaluationTime The evaluation time as double . Cash flows prior and including this time are not considered . * @ param schedule The schedule discretization , i . e . , the period start and end dates . End dates are considered payment dates and start of the next period . * @ param discountCurve The discount curve . * @ param model The model , needed only in case the discount curve evaluation depends on an additional curve . * @ return The swap annuity . */ public static double getSwapAnnuity ( double evaluationTime , ScheduleInterface schedule , DiscountCurveInterface discountCurve , AnalyticModelInterface model ) { } }
double value = 0.0 ; for ( int periodIndex = 0 ; periodIndex < schedule . getNumberOfPeriods ( ) ; periodIndex ++ ) { double paymentDate = schedule . getPayment ( periodIndex ) ; if ( paymentDate <= evaluationTime ) { continue ; } double periodLength = schedule . getPeriodLength ( periodIndex ) ; double discountFactor = discountCurve . getDiscountFactor ( model , paymentDate ) ; value += periodLength * discountFactor ; } return value / discountCurve . getDiscountFactor ( model , evaluationTime ) ;
public class CoordinationTransformer { /** * Transforms t if it contains a coordination in a flat structure ( CCtransform ) * and transforms UCP ( UCPtransform ) . * @ param t a tree to be transformed * @ return t transformed */ public Tree transformTree ( Tree t ) { } }
if ( VERBOSE ) { System . err . println ( "Input to CoordinationTransformer: " + t ) ; } Tree tx = tn . transformTree ( t ) ; if ( VERBOSE ) { System . err . println ( "After DependencyTreeTransformer: " + tx ) ; } if ( tx == null ) { return tx ; } Tree tt = UCPtransform ( tx ) ; if ( VERBOSE ) { System . err . println ( "After UCPTransformer: " + t ) ; } Tree ttt = CCtransform ( tt ) ; if ( VERBOSE ) { System . err . println ( "After CCTransformer: " + t ) ; } Tree ret = qp . transformTree ( ttt ) ; if ( VERBOSE ) { System . err . println ( "After QPTreeTransformer: " + t ) ; } return ret ;
public class ScriptingUtils { /** * Execute groovy script t . * @ param < T > the type parameter * @ param groovyScript the groovy script * @ param methodName the method name * @ param args the args * @ param clazz the clazz * @ param failOnError the fail on error * @ return the t */ @ SneakyThrows public static < T > T executeGroovyScript ( final Resource groovyScript , final String methodName , final Object [ ] args , final Class < T > clazz , final boolean failOnError ) { } }
if ( groovyScript == null || StringUtils . isBlank ( methodName ) ) { return null ; } try { return AccessController . doPrivileged ( ( PrivilegedAction < T > ) ( ) -> getGroovyResult ( groovyScript , methodName , args , clazz , failOnError ) ) ; } catch ( final Exception e ) { var cause = ( Throwable ) null ; if ( e instanceof PrivilegedActionException ) { cause = PrivilegedActionException . class . cast ( e ) . getException ( ) ; } else { cause = e ; } if ( failOnError ) { throw cause ; } LOGGER . error ( cause . getMessage ( ) , cause ) ; } return null ;
public class OptimizerOptions { /** * < code > optional . tensorflow . OptimizerOptions . GlobalJitLevel global _ jit _ level = 5 ; < / code > */ public org . tensorflow . framework . OptimizerOptions . GlobalJitLevel getGlobalJitLevel ( ) { } }
org . tensorflow . framework . OptimizerOptions . GlobalJitLevel result = org . tensorflow . framework . OptimizerOptions . GlobalJitLevel . valueOf ( globalJitLevel_ ) ; return result == null ? org . tensorflow . framework . OptimizerOptions . GlobalJitLevel . UNRECOGNIZED : result ;
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public IfcThermalLoadSourceEnum createIfcThermalLoadSourceEnumFromString ( EDataType eDataType , String initialValue ) { } }
IfcThermalLoadSourceEnum result = IfcThermalLoadSourceEnum . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ;
public class Apptentive { /** * This method takes a unique event string , stores a record of that event having been visited , * determines if there is an interaction that is able to run for this event , and then runs it . If * more than one interaction can run , then the most appropriate interaction takes precedence . Only * one interaction at most will run per invocation of this method . This task is performed * asynchronously . * @ param context The context from which to launch the Interaction . This should be an Activity , * except in rare cases where you don ' t have access to one , in which case * Apptentive Interactions will launch in a new task . * @ param event A unique String representing the line this method is called on . For instance , * you may want to have the ability to target interactions to run after the user * uploads a file in your app . You may then call * < strong > < code > engage ( context , " finished _ upload " ) ; < / code > < / strong > * @ param callback Called after we check to see if an Interaction should be displayed . Called with * true if an Interaction will be displayed , else false . */ public static synchronized void engage ( Context context , String event , BooleanCallback callback ) { } }
engage ( context , event , callback , null , ( ExtendedData [ ] ) null ) ;
public class VirtualMachineScaleSetsInner { /** * Upgrades one or more virtual machines to the latest SKU set in the VM scale set model . * @ param resourceGroupName The name of the resource group . * @ param vmScaleSetName The name of the VM scale set . * @ param instanceIds The virtual machine scale set instance ids . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < OperationStatusResponseInner > beginUpdateInstancesAsync ( String resourceGroupName , String vmScaleSetName , List < String > instanceIds , final ServiceCallback < OperationStatusResponseInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( beginUpdateInstancesWithServiceResponseAsync ( resourceGroupName , vmScaleSetName , instanceIds ) , serviceCallback ) ;
public class nstimer { /** * Use this API to unset the properties of nstimer resource . * Properties that need to be unset are specified in args array . */ public static base_response unset ( nitro_service client , nstimer resource , String [ ] args ) throws Exception { } }
nstimer unsetresource = new nstimer ( ) ; unsetresource . name = resource . name ; unsetresource . interval = resource . interval ; unsetresource . unit = resource . unit ; unsetresource . comment = resource . comment ; return unsetresource . unset_resource ( client , args ) ;
public class EventSourceImpl { /** * Removes the given EventSource listener from the listener list . * @ param listener * EventSourceListener to be unregistered */ public void removeEventSourceListener ( EventSourceListener listener ) { } }
LOG . entering ( CLASS_NAME , "removeEventSourceListener" , listener ) ; if ( listener == null ) { throw new NullPointerException ( "listener" ) ; } listeners . remove ( listener ) ;
public class ZoneMeta { /** * Returns an immutable set of canonical system time zone IDs that * are associated with actual locations . * The result set is a subset of { @ link # getCanonicalSystemZIDs ( ) } , but not * including IDs , such as " Etc / GTM + 5 " . * @ return An immutable set of canonical system time zone IDs that * are associated with actual locations . */ private static synchronized Set < String > getCanonicalSystemLocationZIDs ( ) { } }
Set < String > canonicalSystemLocationZones = null ; if ( REF_CANONICAL_SYSTEM_LOCATION_ZONES != null ) { canonicalSystemLocationZones = REF_CANONICAL_SYSTEM_LOCATION_ZONES . get ( ) ; } if ( canonicalSystemLocationZones == null ) { Set < String > canonicalSystemLocationIDs = new TreeSet < String > ( ) ; String [ ] allIDs = getZoneIDs ( ) ; for ( String id : allIDs ) { // exclude Etc / Unknown if ( id . equals ( TimeZone . UNKNOWN_ZONE_ID ) ) { continue ; } String canonicalID = getCanonicalCLDRID ( id ) ; if ( id . equals ( canonicalID ) ) { String region = getRegion ( id ) ; if ( region != null && ! region . equals ( kWorld ) ) { canonicalSystemLocationIDs . add ( id ) ; } } } canonicalSystemLocationZones = Collections . unmodifiableSet ( canonicalSystemLocationIDs ) ; REF_CANONICAL_SYSTEM_LOCATION_ZONES = new SoftReference < Set < String > > ( canonicalSystemLocationZones ) ; } return canonicalSystemLocationZones ;
public class FLVWriter { /** * Create the stream output file ; the flv itself . * @ throws IOException */ private void createOutputFile ( ) throws IOException { } }
this . fileChannel = Files . newByteChannel ( Paths . get ( filePath ) , StandardOpenOption . CREATE , StandardOpenOption . WRITE , StandardOpenOption . TRUNCATE_EXISTING ) ;
public class Database { /** * Sends a text message ( which will probably create a write access ) to the Neo4j cluster . * @ param message binary json message ( usually json format ) * @ param server server that shall be used to send the message * @ throws ConnectionNotAvailableException no connection to server exception */ public void sendWriteMessage ( final String message , final Server server ) throws ConnectionNotAvailableException { } }
DataConnection connection = server . getConnection ( ) ; if ( connection == null ) { throw new ConnectionNotAvailableException ( server ) ; } connection . send ( message ) ; server . returnConnection ( connection ) ;
public class DomainTransformers { /** * Initialize the domain registry . * @ param registry the domain registry */ public static void initializeDomainRegistry ( final TransformerRegistry registry ) { } }
// The chains for transforming will be as follows
// For JBoss EAP : 8.0.0 - > 5.0.0 - > 4.0.0 - > 1.8.0 - > 1.7.0 - > 1.6.0 - > 1.5.0
// Each call below registers one family of transformers against the registry.
// NOTE(review): registration order is kept exactly as written -- confirm whether
// later registrations depend on earlier ones before reordering.
registerRootTransformers ( registry ) ;
registerChainedManagementTransformers ( registry ) ;
registerChainedServerGroupTransformers ( registry ) ;
registerProfileTransformers ( registry ) ;
registerSocketBindingGroupTransformers ( registry ) ;
registerDeploymentTransformers ( registry ) ;
public class SibRaEndpointActivation { /** * Returns a connection to the given messaging engine . * @ param messagingEngine * the messaging engine for which a connection is required * @ return the connection * @ throws IllegalStateException * if the endpoint is no longer active * @ throws ResourceException * if a new connection is required and the creation fails */ protected final SibRaMessagingEngineConnection getConnection ( final JsMessagingEngine messagingEngine ) throws ResourceException { } }
final String methodName = "getConnection" ;
if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEntryEnabled ( ) ) { SibTr . entry ( this , TRACE , methodName , messagingEngine ) ; }
SibRaMessagingEngineConnection connection ;
if ( _active ) {
// Connections are cached per messaging-engine UUID; the map is guarded so the
// check-then-create sequence stays atomic and only one connection is built
// per engine.
synchronized ( _connections ) { /* * Do we already have a connection ? */ connection = ( SibRaMessagingEngineConnection ) _connections . get ( messagingEngine . getUuid ( ) . toString ( ) ) ; /* * If not , create a new one and add it to the map */ if ( connection == null ) { connection = new SibRaMessagingEngineConnection ( this , messagingEngine ) ; _connections . put ( messagingEngine . getUuid ( ) . toString ( ) , connection ) ; } } } else {
// Deactivated endpoint: fail fast with the NLS-formatted message, per Javadoc.
throw new IllegalStateException ( NLS . getFormattedMessage ( "ENDPOINT_DEACTIVATED_CWSIV0554" , new Object [ ] { messagingEngine . getUuid ( ) , messagingEngine . getBusName ( ) , this } , null ) ) ; }
if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEntryEnabled ( ) ) { SibTr . exit ( this , TRACE , methodName , connection ) ; }
return connection ;
public class SessionDataManager { /** * Return item data by internal < b > qpath < / b > in this transient storage then in workspace * container . * @ param path * - absolute path * @ return existed item data or null if not found * @ throws RepositoryException * @ see org . exoplatform . services . jcr . dataflow . ItemDataConsumer # getItemData ( String ) */ public ItemData getItemData ( QPath path ) throws RepositoryException { } }
NodeData parent = ( NodeData ) getItemData ( Constants . ROOT_UUID ) ; if ( path . equals ( Constants . ROOT_PATH ) ) { return parent ; } QPathEntry [ ] relPathEntries = path . getRelPath ( path . getDepth ( ) ) ; return getItemData ( parent , relPathEntries , ItemType . UNKNOWN ) ;
public class JsonStreamWriter { /** * Write a string attribute . * @ param name attribute name * @ param value attribute value */ public void writeNameValuePair ( String name , String value ) throws IOException { } }
internalWriteNameValuePair ( name , escapeString ( value ) ) ;
public class Properties { /** * Loads all of the properties represented by the XML document on the * specified input stream into this properties table . * < p > The XML document must have the following DOCTYPE declaration : * < pre > * & lt ; ! DOCTYPE properties SYSTEM " http : / / java . sun . com / dtd / properties . dtd " & gt ; * < / pre > * Furthermore , the document must satisfy the properties DTD described * above . * < p > The specified stream is closed after this method returns . * @ param in the input stream from which to read the XML document . * @ throws IOException if reading from the specified input stream * results in an < tt > IOException < / tt > . * @ throws InvalidPropertiesFormatException Data on input stream does not * constitute a valid XML document with the mandated document type . * @ throws NullPointerException if < code > in < / code > is null . * @ see # storeToXML ( OutputStream , String , String ) * @ since 1.5 */ public synchronized void loadFromXML ( InputStream in ) throws IOException , InvalidPropertiesFormatException { } }
if ( in == null ) throw new NullPointerException ( ) ; XMLUtils . load ( this , in ) ; in . close ( ) ;
public class Strings { /** * XML - escapes all the elements in the target list . * @ param target the list of Strings to be escaped . * If non - String objects , toString ( ) will be called . * @ return a List with the result of each * each element of the target . * @ since 2.0.9 */ public List < String > listEscapeXml ( final List < ? > target ) { } }
if ( target == null ) { return null ; } final List < String > result = new ArrayList < String > ( target . size ( ) + 2 ) ; for ( final Object element : target ) { result . add ( escapeXml ( element ) ) ; } return result ;
public class CleverTapAPI { /** * Session */ private void clearIJ ( Context context ) { } }
final SharedPreferences prefs = StorageHelper . getPreferences ( context , Constants . NAMESPACE_IJ ) ; final SharedPreferences . Editor editor = prefs . edit ( ) ; editor . clear ( ) ; StorageHelper . persist ( editor ) ;
public class CmsUserOverviewDialog { /** * Calls the switch user method of the SessionManager . < p > * @ return the direct edit patch * @ throws CmsException if something goes wrong */ public String actionSwitchUser ( ) throws CmsException { } }
try {
CmsSessionManager sessionManager = OpenCms . getSessionManager ( ) ;
// Resolve the target user from the "userid" request parameter.
CmsUser user = getCms ( ) . readUser ( new CmsUUID ( getJsp ( ) . getRequest ( ) . getParameter ( "userid" ) ) ) ;
return sessionManager . switchUser ( getCms ( ) , getJsp ( ) . getRequest ( ) , user ) ;
} catch ( CmsException e ) {
// On failure, navigate the tool manager back to the parent tool path
// (strip the last "/" segment), then rethrow for the caller to handle.
String toolPath = getCurrentToolPath ( ) . substring ( 0 , getCurrentToolPath ( ) . lastIndexOf ( "/" ) ) ; getToolManager ( ) . setCurrentToolPath ( this , toolPath ) ; throw e ; }
public class DescribeTargetHealthResult { /** * Information about the health of the targets . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setTargetHealthDescriptions ( java . util . Collection ) } or * { @ link # withTargetHealthDescriptions ( java . util . Collection ) } if you want to override the existing values . * @ param targetHealthDescriptions * Information about the health of the targets . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeTargetHealthResult withTargetHealthDescriptions ( TargetHealthDescription ... targetHealthDescriptions ) { } }
if ( this . targetHealthDescriptions == null ) { setTargetHealthDescriptions ( new java . util . ArrayList < TargetHealthDescription > ( targetHealthDescriptions . length ) ) ; } for ( TargetHealthDescription ele : targetHealthDescriptions ) { this . targetHealthDescriptions . add ( ele ) ; } return this ;
public class Rule { /** * If there are only properties that should be inlined . * @ param formatter current formatter * @ return true , if only inline */ boolean hasOnlyInlineProperties ( CssFormatter formatter ) { } }
for ( Formattable prop : properties ) { if ( prop instanceof Mixin ) { return false ; } } for ( Rule rule : subrules ) { if ( rule . isValidCSS ( formatter ) && rule . isInlineRule ( formatter ) ) { return false ; } } return true ;
public class AltsHandshakerClient { /** * Processes the next bytes in a handshake . A GeneralSecurityException is thrown if the handshaker * service is interrupted or fails . Note that isFinished ( ) must be false before this function is * called . * @ param inBytes the bytes received from the peer . * @ return the frame to give to the peer . * @ throws GeneralSecurityException or IllegalStateException */ public ByteBuffer next ( ByteBuffer inBytes ) throws GeneralSecurityException { } }
Preconditions . checkState ( ! isFinished ( ) , "Handshake has already finished." ) ;
// Wrap the peer's bytes in a NEXT request; duplicate() so copying into the
// ByteString does not disturb the caller-visible buffer position.
HandshakerReq . Builder req = HandshakerReq . newBuilder ( ) . setNext ( NextHandshakeMessageReq . newBuilder ( ) . setInBytes ( ByteString . copyFrom ( inBytes . duplicate ( ) ) ) . build ( ) ) ;
HandshakerResp resp ;
// Transport failures and interruption are surfaced as GeneralSecurityException,
// per this method's contract.
// NOTE(review): InterruptedException is wrapped without re-interrupting the
// current thread -- confirm this is intended.
try { resp = handshakerStub . send ( req . build ( ) ) ; } catch ( IOException | InterruptedException e ) { throw new GeneralSecurityException ( e ) ; }
handleResponse ( resp ) ;
// Advance past only the bytes the handshaker consumed; any remainder stays
// in the buffer for the next call.
inBytes . position ( inBytes . position ( ) + resp . getBytesConsumed ( ) ) ;
return resp . getOutFrames ( ) . asReadOnlyByteBuffer ( ) ;
public class FileUtils { /** * Create temp file file . * @ param context the context * @ return the file */ @ SuppressWarnings ( "ResultOfMethodCallIgnored, unused" ) public static File createTempFile ( Context context ) { } }
String timeStamp = new SimpleDateFormat ( "yyyyMMdd_HHmmssSSS" , Locale . US ) . format ( new Date ( ) ) ; String tempFileName = PREFIX + timeStamp + "_" ; File cacheDir = context . getCacheDir ( ) ; if ( ! cacheDir . exists ( ) ) { cacheDir . mkdir ( ) ; } return new File ( cacheDir , tempFileName + EXTENSION ) ;
public class WeightedReservoirSampler { /** * Returns an integer at random , weighted according to its index * @ param weights weights to sample from * @ return index chosen according to the weight supplied */ public int randomIndexChoice ( List < Integer > weights ) { } }
int result = 0 , index ; double maxKey = 0.0 ; double u , key ; int weight ; for ( ListIterator < Integer > it = weights . listIterator ( ) ; it . hasNext ( ) ; ) { index = it . nextIndex ( ) ; weight = it . next ( ) ; u = random . nextDouble ( ) ; key = Math . pow ( u , ( 1.0 / weight ) ) ; // Protect from zero division ? if ( key > maxKey ) { maxKey = key ; result = index ; } } return result ;
public class DateType { /** * The value that can be set is a Date , a DateTime or a String * yyyy - MM - dd ' T ' HH : mm : ss . SSSZZ . It will be normalized to ISO Calender with * TimeZone from SystemAttribute Admin _ Common _ DataBaseTimeZone . In case that * the SystemAttribute is missing UTC will be used . * For storing the value in the database the time is set to 00:00; * @ param _ value value to evaluate * @ return evaluated value * @ throws EFapsException on error */ @ Override protected Timestamp eval ( final Object [ ] _value ) throws EFapsException { } }
final Timestamp ret ;
// Null / empty input evaluates to a null Timestamp.
if ( _value == null || _value . length == 0 || _value [ 0 ] == null ) { ret = null ; } else {
DateTime dateTime = new DateTime ( ) ;
// Accept a java.util.Date, a Joda DateTime, or a String.
if ( _value [ 0 ] instanceof Date ) { dateTime = new DateTime ( _value [ 0 ] ) ; } else if ( _value [ 0 ] instanceof DateTime ) { dateTime = ( DateTime ) _value [ 0 ] ; } else if ( _value [ 0 ] instanceof String ) { final String str = ( String ) _value [ 0 ] ;
// A 10-character string is treated as a plain ISO local date (yyyy-MM-dd);
// anything else is parsed as a full ISO date-time with offset.
if ( str . length ( ) == 10 ) { final TemporalAccessor temp = DateTimeFormatter . ISO_LOCAL_DATE . parse ( str ) ; dateTime = new DateTime ( ) . withDate ( temp . get ( ChronoField . YEAR ) , temp . get ( ChronoField . MONTH_OF_YEAR ) , temp . get ( ChronoField . DAY_OF_MONTH ) ) ; } else { dateTime = ISODateTimeFormat . dateTime ( ) . withOffsetParsed ( ) . parseDateTime ( ( String ) _value [ 0 ] ) ; } }
// until now we have a time that depends on the timezone of the application server
// to convert it in a timestamp for the efaps database the timezone information ( mainly the offset )
// must be removed . This is done by creating a local date with the same , date and time .
// this guarantees that the datetime inserted into the database depends on the setting
// in the configuration and not on the timezone for the application server .
// The time-of-day is dropped (set to 00:00) per the contract above.
final DateTime localized = new DateTime ( dateTime . getYear ( ) , dateTime . getMonthOfYear ( ) , dateTime . getDayOfMonth ( ) , 0 , 0 , 0 , 0 ) ;
// NOTE(review): `localized` is always non-null here (fresh constructor result),
// so the null branch of this ternary is dead code -- kept as-is.
ret = localized != null ? new Timestamp ( localized . getMillis ( ) ) : null ; }
return ret ;
public class JDBCClob { /** * Writes the given Java < code > String < / code > to the < code > CLOB < / code > * value that this < code > Clob < / code > object designates at the position * < code > pos < / code > . The string will overwrite the existing characters * in the < code > Clob < / code > object starting at the position * < code > pos < / code > . If the end of the < code > Clob < / code > value is reached * while writing the given string , then the length of the < code > Clob < / code > * value will be increased to accomodate the extra characters . * < b > Note : < / b > If the value specified for < code > pos < / code > * is greater then the length + 1 of the < code > CLOB < / code > value then the * behavior is undefined . Some JDBC drivers may throw a * < code > SQLException < / code > while other drivers may support this * operation . * < ! - - start release - specific documentation - - > * < div class = " ReleaseSpecificDocumentation " > * < h3 > HSQLDB - Specific Information : < / h3 > < p > * Starting with HSQLDB 1.9.0 this feature is supported . < p > * When built under JDK 1.6 + and the Clob instance is constructed as a * result of calling JDBCConnection . createClob ( ) , this operation affects * only the client - side value ; it has no effect upon a value stored in the * database because JDBCConnection . createClob ( ) constructs disconnected , * initially empty Clob instances . To propogate the Clob value to a database * in this case , it is required to supply the Clob instance to an updating * or inserting setXXX method of a Prepared or Callable Statement , or to * supply the Clob instance to an updateXXX method of an updateable * ResultSet . < p > * < b > Implementation Notes : < / b > < p > * No attempt is made to ensure precise thread safety . Instead , volatile * member field and local variable snapshot isolation semantics are * implemented . 
This is expected to eliminate most issues related * to race conditions , with the possible exception of concurrent * invocation of free ( ) . < p > * In general , however , if an application may perform concurrent * JDBCClob modifications and the integrity of the application depends on * total order Clob modification semantics , then such operations * should be synchronized on an appropriate monitor . < p > * < / div > * < ! - - end release - specific documentation - - > * @ param pos the position at which to start writing to the < code > CLOB < / code > * value that this < code > Clob < / code > object represents ; * The first position is 1 * @ param str the string to be written to the < code > CLOB < / code > * value that this < code > Clob < / code > designates * @ return the number of characters written * @ exception SQLException if there is an error accessing the * < code > CLOB < / code > value or if pos is less than 1 * @ exception SQLFeatureNotSupportedException if the JDBC driver does not support * this method * @ since JDK 1.4 , HSQLDB 1.7.2 * @ revised JDK 1.6 , HSQLDB 1.9.0 */ public int setString ( long pos , String str ) throws SQLException { } }
if ( str == null ) { throw Util . nullArgument ( "str" ) ; } return setString ( pos , str , 0 , str . length ( ) ) ;
public class Http { /** * The output format for your data : * json _ meta - The current default format , where each payload contains a full JSON document . It contains metadata * and an " interactions " property that has an array of interactions . * json _ array - The payload is a full JSON document , but just has an array of interactions . * json _ new _ line - The payload is NOT a full JSON document . Each interaction is flattened and separated by a line * break . * If you omit this parameter or set it to json _ meta , your output consists of JSON metadata followed by a JSON * array of interactions ( wrapped in square brackets and separated by commas ) . * Take a look at our Sample Output for File - Based Connectors page . * If you select json _ array , DataSift omits the metadata and sends just the array of interactions . * If you select json _ new _ line , DataSift omits the metadata and sends each interaction as a single JSON object . * @ return this */ public Http format ( HttpFormat format ) { } }
String strFormat ; switch ( format ) { case JSON_ARRAY : strFormat = "json_array" ; break ; case JSON_NEW_LINE : strFormat = "json_new_line" ; break ; default : case JSON_META : strFormat = "json_meta" ; break ; } return setParam ( "format" , strFormat ) ;
public class AbstractScriptProvider { /** * Loads all script classes ( discovers classes via a fresh ScriptClassLoader , * filters the instantiable script classes , and swaps them into the code map ) . * @ throws Exception on load / instantiation failure */ protected final void loadScirptClass ( ) throws Exception { } }
// Fast path: bail out if a load already appears to be in progress.
// NOTE(review): this check happens before the write lock is acquired, so two
// threads may both pass it and load concurrently -- confirm whether intended.
if ( state == State . loading ) { return ; }
rwLock . writeLock ( ) . lock ( ) ;
try {
state = State . loading ;
ScriptClassLoader loader = loadClassByLoader ( ) ;
Set < Class < ? > > allClass = loader . findedClass ;
Set < Class < ? extends T > > scriptClass = findScriptClass ( allClass ) ;
Map < Integer , Class < ? extends T > > newCodeMap = findInstanceAbleScript ( scriptClass ) ;
// Swap the freshly loaded code map and class loader into place.
this . codeMap . clear ( ) ;
this . codeMap . putAll ( newCodeMap ) ;
this . classLoader = loader ;
this . classLoader . close ( ) ; // close the resource file reference
} finally {
// NOTE(review): state becomes `loaded` even when loading threw -- confirm.
state = State . loaded ;
rwLock . writeLock ( ) . unlock ( ) ;
}
public class WDTimerImpl { /** * Ping a watchdog . * @ throws IOException */ @ Override public void heartbeat ( ) throws IOException { } }
isOpen ( ) ; int ret = WDT . ping ( fd ) ; if ( ret < 0 ) { throw new IOException ( "Heartbeat error. File " + filename + " got " + ret + " back." ) ; }
public class ReactionSet { /** * Adds an reaction to this container . * @ param reaction The reaction to be added to this container */ @ Override public void addReaction ( IReaction reaction ) { } }
if ( reactionCount + 1 >= reactions . length ) growReactionArray ( ) ; reactions [ reactionCount ] = reaction ; reactionCount ++ ;
public class PairSet { /** * Gets the set of transactions in { @ link # allTransactions ( ) } that contains * at least one item * @ return the set of transactions in { @ link # allTransactions ( ) } that * contains at least one item */ public IndexedSet < T > involvedTransactions ( ) { } }
IndexedSet < T > res = allTransactions . empty ( ) ; res . indices ( ) . addAll ( matrix . involvedRows ( ) ) ; return res ;
public class GobblinAWSUtils { /** * Initiates an orderly shutdown in which previously submitted * tasks are executed , but no new tasks are accepted . * Invocation has no additional effect if already shut down . * This also blocks until all tasks have completed execution * request , or the timeout occurs , or the current thread is * interrupted , whichever happens first . * @ param clazz { @ link Class } that invokes shutdown on the { @ link ExecutorService } . * @ param executorService { @ link ExecutorService } to shutdown . * @ param logger { @ link Logger } to log shutdown for invoking class . * @ throws InterruptedException if shutdown is interrupted . */ public static void shutdownExecutorService ( Class clazz , ExecutorService executorService , Logger logger ) throws InterruptedException { } }
executorService . shutdown ( ) ; if ( ! executorService . awaitTermination ( DEFAULT_EXECUTOR_SERVICE_SHUTDOWN_TIME_IN_MINUTES , TimeUnit . MINUTES ) ) { logger . warn ( "Executor service shutdown timed out." ) ; List < Runnable > pendingTasks = executorService . shutdownNow ( ) ; logger . warn ( String . format ( "%s was shutdown instantly. %s tasks were not executed: %s" , clazz . getName ( ) , pendingTasks . size ( ) , StringUtils . join ( pendingTasks , "," ) ) ) ; }
public class MtasSolrSearchComponent { /** * ( non - Javadoc ) * @ see * org . apache . solr . handler . component . SearchComponent # process ( org . apache . solr . * handler . component . ResponseBuilder ) */ @ Override public void process ( ResponseBuilder rb ) throws IOException { } }
// Runs the Mtas stage of query processing: collects per-field results,
// assembles them into the "mtas" response section, and records progress in
// the shared MtasSolrStatus.
// System . out
// . println ( System . nanoTime ( ) + " - " + Thread . currentThread ( ) . getId ( )
// + " - " + rb . req . getParams ( ) . getBool ( ShardParams . IS _ SHARD , false )
// + " PROCESS " + rb . stage + " " + rb . req . getParamString ( ) ) ;
MtasSolrStatus solrStatus = Objects . requireNonNull ( ( MtasSolrStatus ) rb . req . getContext ( ) . get ( MtasSolrStatus . class ) , "couldn't find status" ) ;
solrStatus . setStage ( rb . stage ) ;
try {
if ( rb . req . getParams ( ) . getBool ( PARAM_MTAS , false ) ) {
try {
ComponentFields mtasFields = getMtasFields ( rb ) ;
if ( mtasFields != null ) {
DocSet docSet = rb . getResults ( ) . docSet ;
DocList docList = rb . getResults ( ) . docList ;
// Only do any work when at least one Mtas sub-component was requested.
if ( mtasFields . doStats || mtasFields . doDocument || mtasFields . doKwic || mtasFields . doList || mtasFields . doGroup || mtasFields . doFacet || mtasFields . doCollection || mtasFields . doTermVector || mtasFields . doPrefix || mtasFields . doStatus || mtasFields . doVersion ) {
SolrIndexSearcher searcher = rb . req . getSearcher ( ) ;
ArrayList < Integer > docSetList = null ;
ArrayList < Integer > docListList = null ;
// initialise docSetList
if ( docSet != null ) { docSetList = new ArrayList < > ( ) ; Iterator < Integer > docSetIterator = docSet . iterator ( ) ; while ( docSetIterator . hasNext ( ) ) { docSetList . add ( docSetIterator . next ( ) ) ; } Collections . sort ( docSetList ) ; }
// initialise docListList
if ( docList != null ) { docListList = new ArrayList < > ( ) ; Iterator < Integer > docListIterator = docList . iterator ( ) ; while ( docListIterator . hasNext ( ) ) { docListList . add ( docListIterator . next ( ) ) ; } Collections . sort ( docListList ) ; }
solrStatus . status ( ) . addSubs ( mtasFields . list . keySet ( ) ) ;
// Collect the raw per-field data; reflection failures are wrapped as IOException.
for ( String field : mtasFields . list . keySet ( ) ) { try { CodecUtil . collectField ( field , searcher , searcher . getRawReader ( ) , docListList , docSetList , mtasFields . list . get ( field ) , solrStatus .
status ( ) ) ; } catch ( IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) { log . error ( e ) ; throw new IOException ( e ) ; } }
for ( ComponentCollection collection : mtasFields . collection ) { CodecUtil . collectCollection ( searcher . getRawReader ( ) , docSetList , collection ) ; }
// Assemble the response section; each sub-component is added under its NAME.
NamedList < Object > mtasResponse = new SimpleOrderedMap < > ( ) ;
if ( mtasFields . doVersion ) { SimpleOrderedMap < Object > versionResponse = searchVersion . create ( mtasFields . version , false ) ; mtasResponse . add ( MtasSolrComponentVersion . NAME , versionResponse ) ; }
if ( mtasFields . doStatus ) {
// add to response
SimpleOrderedMap < Object > statusResponse = searchStatus . create ( mtasFields . status , false ) ; if ( statusResponse != null ) { mtasResponse . add ( MtasSolrComponentStatus . NAME , searchStatus . create ( mtasFields . status , false ) ) ; } }
if ( mtasFields . doDocument ) { ArrayList < NamedList < ? > > mtasDocumentResponses = new ArrayList < > ( ) ; for ( String field : mtasFields . list . keySet ( ) ) { for ( ComponentDocument document : mtasFields . list . get ( field ) . documentList ) { mtasDocumentResponses . add ( searchDocument . create ( document , false ) ) ; } }
// add to response
mtasResponse . add ( MtasSolrComponentDocument . NAME , mtasDocumentResponses ) ; }
if ( mtasFields . doKwic ) { ArrayList < NamedList < ? > > mtasKwicResponses = new ArrayList < > ( ) ; for ( String field : mtasFields . list . keySet ( ) ) { for ( ComponentKwic kwic : mtasFields . list . get ( field ) . kwicList ) { mtasKwicResponses . add ( searchKwic . create ( kwic , false ) ) ; } }
// add to response
mtasResponse . add ( MtasSolrComponentKwic . NAME , mtasKwicResponses ) ; }
// For the components below, the second argument of create() is true when this
// request runs on a shard, so shard-specific encoding is produced.
if ( mtasFields . doFacet ) { ArrayList < NamedList < ? > > mtasFacetResponses = new ArrayList < > ( ) ; for ( String field : mtasFields . list . keySet ( ) ) { for ( ComponentFacet facet : mtasFields . list . get ( field ) . facetList ) { if ( rb . req .
getParams ( ) . getBool ( ShardParams . IS_SHARD , false ) ) { mtasFacetResponses . add ( searchFacet . create ( facet , true ) ) ; } else { mtasFacetResponses . add ( searchFacet . create ( facet , false ) ) ; } } }
// add to response
mtasResponse . add ( MtasSolrComponentFacet . NAME , mtasFacetResponses ) ; }
if ( mtasFields . doCollection ) { ArrayList < NamedList < ? > > mtasCollectionResponses = new ArrayList < > ( ) ; for ( ComponentCollection collection : mtasFields . collection ) { if ( rb . req . getParams ( ) . getBool ( ShardParams . IS_SHARD , false ) ) { mtasCollectionResponses . add ( searchCollection . create ( collection , true ) ) ; } else { mtasCollectionResponses . add ( searchCollection . create ( collection , false ) ) ; } }
// add to response
mtasResponse . add ( MtasSolrComponentCollection . NAME , mtasCollectionResponses ) ; }
if ( mtasFields . doList ) { ArrayList < NamedList < ? > > mtasListResponses = new ArrayList < > ( ) ; for ( String field : mtasFields . list . keySet ( ) ) { for ( ComponentList list : mtasFields . list . get ( field ) . listList ) { mtasListResponses . add ( searchList . create ( list , false ) ) ; } }
// add to response
mtasResponse . add ( MtasSolrComponentList . NAME , mtasListResponses ) ; }
if ( mtasFields . doGroup ) { ArrayList < NamedList < ? > > mtasGroupResponses = new ArrayList < > ( ) ; for ( String field : mtasFields . list . keySet ( ) ) { for ( ComponentGroup group : mtasFields . list . get ( field ) . groupList ) { if ( rb . req . getParams ( ) . getBool ( ShardParams . IS_SHARD , false ) ) { mtasGroupResponses . add ( searchGroup . create ( group , true ) ) ; } else { mtasGroupResponses . add ( searchGroup . create ( group , false ) ) ; } } }
// add to response
mtasResponse . add ( MtasSolrComponentGroup . NAME , mtasGroupResponses ) ; }
if ( mtasFields . doTermVector ) { ArrayList < NamedList < ? > > mtasTermVectorResponses = new ArrayList < > ( ) ; for ( String field : mtasFields . list .
keySet ( ) ) { for ( ComponentTermVector termVector : mtasFields . list . get ( field ) . termVectorList ) { if ( rb . req . getParams ( ) . getBool ( ShardParams . IS_SHARD , false ) ) { mtasTermVectorResponses . add ( searchTermvector . create ( termVector , true ) ) ; } else { mtasTermVectorResponses . add ( searchTermvector . create ( termVector , false ) ) ; } } }
// add to response
mtasResponse . add ( MtasSolrComponentTermvector . NAME , mtasTermVectorResponses ) ; }
if ( mtasFields . doPrefix ) { ArrayList < NamedList < ? > > mtasPrefixResponses = new ArrayList < > ( ) ; for ( String field : mtasFields . list . keySet ( ) ) { if ( mtasFields . list . get ( field ) . prefix != null ) { if ( rb . req . getParams ( ) . getBool ( ShardParams . IS_SHARD , false ) ) { mtasPrefixResponses . add ( searchPrefix . create ( mtasFields . list . get ( field ) . prefix , true ) ) ; } else { mtasPrefixResponses . add ( searchPrefix . create ( mtasFields . list . get ( field ) . prefix , false ) ) ; } } } mtasResponse . add ( MtasSolrComponentPrefix . NAME , mtasPrefixResponses ) ; }
// Stats are grouped under one nested section with tokens/positions/spans entries.
if ( mtasFields . doStats ) { NamedList < Object > mtasStatsResponse = new SimpleOrderedMap < > ( ) ; if ( mtasFields . doStatsPositions || mtasFields . doStatsTokens || mtasFields . doStatsSpans ) { if ( mtasFields . doStatsTokens ) { ArrayList < Object > mtasStatsTokensResponses = new ArrayList < > ( ) ; for ( String field : mtasFields . list . keySet ( ) ) { for ( ComponentToken token : mtasFields . list . get ( field ) . statsTokenList ) { if ( rb . req . getParams ( ) . getBool ( ShardParams . IS_SHARD , false ) ) { mtasStatsTokensResponses . add ( searchStats . create ( token , true ) ) ; } else { mtasStatsTokensResponses . add ( searchStats . create ( token , false ) ) ; } } } mtasStatsResponse . add ( MtasSolrComponentStats . NAME_TOKENS , mtasStatsTokensResponses ) ; } if ( mtasFields .
doStatsPositions ) { ArrayList < Object > mtasStatsPositionsResponses = new ArrayList < > ( ) ; for ( String field : mtasFields . list . keySet ( ) ) { for ( ComponentPosition position : mtasFields . list . get ( field ) . statsPositionList ) { if ( rb . req . getParams ( ) . getBool ( ShardParams . IS_SHARD , false ) ) { mtasStatsPositionsResponses . add ( searchStats . create ( position , true ) ) ; } else { mtasStatsPositionsResponses . add ( searchStats . create ( position , false ) ) ; } } } mtasStatsResponse . add ( MtasSolrComponentStats . NAME_POSITIONS , mtasStatsPositionsResponses ) ; }
if ( mtasFields . doStatsSpans ) { ArrayList < Object > mtasStatsSpansResponses = new ArrayList < > ( ) ; for ( String field : mtasFields . list . keySet ( ) ) { for ( ComponentSpan span : mtasFields . list . get ( field ) . statsSpanList ) { if ( rb . req . getParams ( ) . getBool ( ShardParams . IS_SHARD , false ) ) { mtasStatsSpansResponses . add ( searchStats . create ( span , true ) ) ; } else { mtasStatsSpansResponses . add ( searchStats . create ( span , false ) ) ; } } } mtasStatsResponse . add ( MtasSolrComponentStats . NAME_SPANS , mtasStatsSpansResponses ) ; }
// add to response
mtasResponse . add ( MtasSolrComponentStats . NAME , mtasStatsResponse ) ; } }
// add to response
if ( mtasResponse . size ( ) > 0 ) { rb . rsp . add ( NAME , mtasResponse ) ; } } }
} catch ( IOException e ) { errorStatus ( solrStatus , e ) ; } }
if ( ! solrStatus . error ( ) ) {
// always set status segments
if ( solrStatus . status ( ) . numberSegmentsTotal == null ) { solrStatus . status ( ) . numberSegmentsTotal = rb . req . getSearcher ( ) . getRawReader ( ) . leaves ( ) . size ( ) ; solrStatus . status ( ) . numberSegmentsFinished = solrStatus . status ( ) . numberSegmentsTotal ; }
// always try to set number of documents
if ( solrStatus . status ( ) . numberDocumentsTotal == null ) { SolrIndexSearcher searcher ; if ( ( searcher = rb . req .
getSearcher ( ) ) != null ) { solrStatus . status ( ) . numberDocumentsTotal = ( long ) searcher . numDocs ( ) ; if ( rb . getResults ( ) . docList != null ) { solrStatus . status ( ) . numberDocumentsFinished = rb . getResults ( ) . docList . matches ( ) ; solrStatus . status ( ) . numberDocumentsFound = rb . getResults ( ) . docList . matches ( ) ; } else if ( rb . getResults ( ) . docSet != null ) { solrStatus . status ( ) . numberDocumentsFinished = ( long ) rb . getResults ( ) . docSet . size ( ) ; solrStatus . status ( ) . numberDocumentsFound = ( long ) rb . getResults ( ) . docSet . size ( ) ; } } } }
// A timed-out/exited reader is recorded as an error rather than rethrown.
} catch ( ExitableDirectoryReader . ExitingReaderException e ) { solrStatus . setError ( e . getMessage ( ) ) ; } finally {
// Status bookkeeping always runs, error or not.
checkStatus ( solrStatus ) ; finishStatus ( solrStatus ) ; }
public class IOUtil { /** * Parse the specified Reader line by line . * @ param reader * @ param lineOffset * @ param count * @ param processThreadNum new threads started to parse / process the lines / records * @ param queueSize size of queue to save the processing records / lines loaded from source data . Default size is 1024. * @ param lineParser * @ param onComplete */ public static < E extends Exception , E2 extends Exception > void parse ( final Reader reader , final long lineOffset , final long count , final int processThreadNum , final int queueSize , final Try . Consumer < String , E > lineParser , final Try . Runnable < E2 > onComplete ) throws UncheckedIOException , E , E2 { } }
Iterables . parse ( new LineIterator ( reader ) , lineOffset , count , processThreadNum , queueSize , lineParser , onComplete ) ;
public class HttpConnectionUtil { /** * Logs details about the request error . * @ param response * http response * @ throws IOException * on IO error * @ throws ParseException * on parse error */ public static void handleError ( HttpResponse response ) throws ParseException , IOException { } }
log . debug ( "{}" , response . getStatusLine ( ) . toString ( ) ) ; HttpEntity entity = response . getEntity ( ) ; if ( entity != null ) { log . debug ( "{}" , EntityUtils . toString ( entity ) ) ; }
public class CPSpecificationOptionWrapper {
    /**
     * Returns the localized description of this cp specification option in the
     * language, optionally using the default language if no localization
     * exists for the requested language.
     *
     * @param languageId the ID of the language
     * @param useDefault whether to use the default language if no localization
     *        exists for the requested language
     * @return the localized description of this cp specification option
     */
    @Override
    public String getDescription(String languageId, boolean useDefault) {
        // Pure delegation: this wrapper forwards to the wrapped model object.
        return _cpSpecificationOption.getDescription(languageId, useDefault);
    }
}
public class GalleryWidgetPresenter {
    /**
     * CDI observer fired when a data set definition is modified.
     * The widget is only notified/redrawn when:
     * - the modified data set is the one this widget feeds from, and
     * - the data set is not "real-time", i.e. it does not refresh always or
     *   its refresh rate is greater than 60 seconds (avoids firing tons of
     *   notifications in real-time scenarios).
     */
    private void onDataSetModifiedEvent(@Observes DataSetModifiedEvent event) {
        checkNotNull("event", event);
        DataSetDef def = event.getDataSetDef();
        String targetUUID = event.getDataSetDef().getUUID();
        // "Real-time" means a refresh time frame of 60 seconds or less;
        // a null time frame counts as not real-time.
        TimeAmount timeFrame = def.getRefreshTimeAmount();
        boolean noRealTime = timeFrame == null || timeFrame.toMillis() > 60000;
        if ((!def.isRefreshAlways() || noRealTime) && widget != null && widget.feedsFrom(targetUUID)) {
            // Notify the user and repaint the widget with the fresh data.
            workbenchNotification.fire(new NotificationEvent(AppConstants.INSTANCE.gallerywidget_dataset_modif(), INFO));
            widget.redrawAll();
        }
    }
}
public class StreamHelpers { /** * Reads at most ' maxLength ' bytes from the given input stream , as long as the stream still has data to serve . * @ param stream The InputStream to read from . * @ param target The target array to write data to . * @ param startOffset The offset within the target array to start writing data to . * @ param maxLength The maximum number of bytes to copy . * @ return The number of bytes copied . * @ throws IOException If unable to read from the given stream . */ public static int readAll ( InputStream stream , byte [ ] target , int startOffset , int maxLength ) throws IOException { } }
Preconditions . checkNotNull ( stream , "stream" ) ; Preconditions . checkNotNull ( stream , "target" ) ; Preconditions . checkElementIndex ( startOffset , target . length , "startOffset" ) ; Exceptions . checkArgument ( maxLength >= 0 , "maxLength" , "maxLength must be a non-negative number." ) ; int totalBytesRead = 0 ; while ( totalBytesRead < maxLength ) { int bytesRead = stream . read ( target , startOffset + totalBytesRead , maxLength - totalBytesRead ) ; if ( bytesRead < 0 ) { // End of stream / break ; } totalBytesRead += bytesRead ; } return totalBytesRead ;
public class ApiOvhHorizonView { /** * Link your Active Directory to your CDI Active Directory * REST : POST / horizonView / { serviceName } / domainTrust * @ param activeDirectoryIP [ required ] IP of your Active Directory * @ param domain [ required ] Domain of your active directory ( for example domain . local ) * @ param dns2 [ required ] IP of your second DNS * @ param dns1 [ required ] IP of your first DNS * @ param serviceName [ required ] Domain of the service */ public ArrayList < OvhTask > serviceName_domainTrust_POST ( String serviceName , String activeDirectoryIP , String dns1 , String dns2 , String domain ) throws IOException { } }
String qPath = "/horizonView/{serviceName}/domainTrust" ; StringBuilder sb = path ( qPath , serviceName ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "activeDirectoryIP" , activeDirectoryIP ) ; addBody ( o , "dns1" , dns1 ) ; addBody ( o , "dns2" , dns2 ) ; addBody ( o , "domain" , domain ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , t2 ) ;
public class MetadataReferenceDao {
    /**
     * {@inheritDoc}
     * Update using the foreign key columns (file id / parent id) as the WHERE
     * criteria rather than a primary key.
     */
    @Override
    public int update(MetadataReference metadataReference) throws SQLException {
        UpdateBuilder<MetadataReference, Void> ub = updateBuilder();
        // Set every updatable column from the supplied row.
        // NOTE(review): getReferenceScope() is dereferenced unconditionally — a
        // null scope would throw NPE here; confirm the scope is mandatory.
        ub.updateColumnValue(MetadataReference.COLUMN_REFERENCE_SCOPE, metadataReference.getReferenceScope().getValue());
        ub.updateColumnValue(MetadataReference.COLUMN_TABLE_NAME, metadataReference.getTableName());
        ub.updateColumnValue(MetadataReference.COLUMN_COLUMN_NAME, metadataReference.getColumnName());
        ub.updateColumnValue(MetadataReference.COLUMN_ROW_ID_VALUE, metadataReference.getRowIdValue());
        ub.updateColumnValue(MetadataReference.COLUMN_TIMESTAMP, metadataReference.getTimestamp());
        // Restrict the update to the row identified by the foreign keys.
        setFkWhere(ub.where(), metadataReference.getFileId(), metadataReference.getParentId());
        PreparedUpdate<MetadataReference> update = ub.prepare();
        int updated = update(update);
        return updated;
    }
}
public class MultiChoiceListPreference {
    /**
     * Creates and returns a listener, which allows to observe when list items
     * are selected or unselected by the user.
     *
     * @return The listener, which has been created, as an instance of the type
     *         {@link OnMultiChoiceClickListener}
     */
    private OnMultiChoiceClickListener createListItemListener() {
        return new OnMultiChoiceClickListener() {

            @Override
            public void onClick(final DialogInterface dialog, final int which, final boolean isChecked) {
                // Track the checked state of the clicked item by its index.
                if (isChecked) {
                    selectedIndices.add(which);
                } else {
                    // NOTE(review): 'which' is an int — if selectedIndices is a
                    // List, remove(int) removes BY INDEX, not by value. Confirm
                    // selectedIndices is a Set (or box 'which' explicitly).
                    selectedIndices.remove(which);
                }
            }

        };
    }
}
public class DocumentUrl { /** * Get Resource Url for GetDocument * @ param documentId Unique identifier for a document , used by content and document calls . Document IDs are associated with document types , document type lists , sites , and tenants . * @ param documentListName Name of content documentListName to delete * @ param includeInactive Include inactive content . * @ param responseFields Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object . This parameter should only be used to retrieve data . Attempting to update data using this parameter may cause data loss . * @ return String Resource Url */ public static MozuUrl getDocumentUrl ( String documentId , String documentListName , Boolean includeInactive , String responseFields ) { } }
UrlFormatter formatter = new UrlFormatter ( "/api/content/documentlists/{documentListName}/documents/{documentId}?includeInactive={includeInactive}&responseFields={responseFields}" ) ; formatter . formatUrl ( "documentId" , documentId ) ; formatter . formatUrl ( "documentListName" , documentListName ) ; formatter . formatUrl ( "includeInactive" , includeInactive ) ; formatter . formatUrl ( "responseFields" , responseFields ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class State {
    /**
     * {@inheritDoc}
     * Lifts a plain value into a State computation that yields the value and
     * passes the threaded state through unchanged.
     */
    @Override
    public <B> State<S, B> pure(B b) {
        // Pair the constant value with whatever state comes in.
        return state(s -> tuple(b, s));
    }
}
public class GraphLoader { /** * Method for loading a weighted graph from an edge list file , where each edge ( inc . weight ) is represented by a * single line . Graph may be directed or undirected < br > * This method assumes that edges are of the format : { @ code fromIndex < delim > toIndex < delim > edgeWeight } where { @ code < delim > } * is the delimiter . * @ param path Path to the edge list file * @ param numVertices The number of vertices in the graph * @ param delim The delimiter used in the file ( typically : " , " or " " etc ) * @ param directed whether the edges should be treated as directed ( true ) or undirected ( false ) * @ param allowMultipleEdges If set to false , the graph will not allow multiple edges between any two vertices to exist . However , * checking for duplicates during graph loading can be costly , so use allowMultipleEdges = true when * possible . * @ param ignoreLinesStartingWith Starting characters for comment lines . May be null . For example : " / / " or " # " * @ return The graph * @ throws IOException */ public static Graph < String , Double > loadWeightedEdgeListFile ( String path , int numVertices , String delim , boolean directed , boolean allowMultipleEdges , String ... ignoreLinesStartingWith ) throws IOException { } }
Graph < String , Double > graph = new Graph < > ( numVertices , allowMultipleEdges , new StringVertexFactory ( ) ) ; EdgeLineProcessor < Double > lineProcessor = new WeightedEdgeLineProcessor ( delim , directed , ignoreLinesStartingWith ) ; try ( BufferedReader br = new BufferedReader ( new FileReader ( new File ( path ) ) ) ) { String line ; while ( ( line = br . readLine ( ) ) != null ) { Edge < Double > edge = lineProcessor . processLine ( line ) ; if ( edge != null ) { graph . addEdge ( edge ) ; } } } return graph ;
public class Event { /** * A list of resources referenced by the event returned . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setResources ( java . util . Collection ) } or { @ link # withResources ( java . util . Collection ) } if you want to * override the existing values . * @ param resources * A list of resources referenced by the event returned . * @ return Returns a reference to this object so that method calls can be chained together . */ public Event withResources ( Resource ... resources ) { } }
if ( this . resources == null ) { setResources ( new com . amazonaws . internal . SdkInternalList < Resource > ( resources . length ) ) ; } for ( Resource ele : resources ) { this . resources . add ( ele ) ; } return this ;
public class CSVLoader {
    /**
     * Load schema contents from the m_config.schema file.
     * If no schema path was configured, it defaults to "<root><app>.xml".
     *
     * @return the full text of the schema file
     */
    private String getSchema() {
        if (Utils.isEmpty(m_config.schema)) {
            // Fall back to the conventional location derived from root + app.
            m_config.schema = m_config.root + m_config.app + ".xml";
        }
        File schemaFile = new File(m_config.schema);
        if (!schemaFile.exists()) {
            logErrorThrow("Schema file not found: {}", m_config.schema);
        }
        // Slurp the file in 64 KiB chunks.
        // NOTE(review): FileReader uses the platform default charset — confirm
        // schema files are always in that encoding (or switch to an explicit one).
        StringBuilder schemaBuffer = new StringBuilder();
        char[] charBuffer = new char[65536];
        try (FileReader reader = new FileReader(schemaFile)) {
            for (int bytesRead = reader.read(charBuffer); bytesRead > 0; bytesRead = reader.read(charBuffer)) {
                schemaBuffer.append(charBuffer, 0, bytesRead);
            }
        } catch (Exception e) {
            logErrorThrow("Cannot read schema file '{}': {}", m_config.schema, e);
        }
        return schemaBuffer.toString();
    }
}
public class MapModel {
    /**
     * Count the total number of raster layers in this model.
     *
     * @return number of raster layers
     */
    private int rasterLayerCount() {
        int rasterLayerCount = 0;
        // NOTE(review): the loop bound comes from mapInfo.getLayers() but the
        // elements are read from 'layers' — confirm both collections are always
        // kept the same size, otherwise this can miscount or throw
        // IndexOutOfBoundsException.
        for (int index = 0; index < mapInfo.getLayers().size(); index++) {
            if (layers.get(index) instanceof RasterLayer) {
                rasterLayerCount++;
            }
        }
        return rasterLayerCount;
    }
}
public class MoneyFormatterBuilder {
    /**
     * Appends the specified formatters, one used when the amount is positive or
     * zero, and one when the amount is negative.
     * A typical use case would be to produce '{@code ($123)}' for negative
     * amounts and '{@code $123}' for positive amounts. See
     * {@link MoneyAmountStyle#withAbsValue(boolean)} for outputting an
     * unsigned amount.
     *
     * @param whenPositiveOrZero the formatter to use when the amount is positive or zero
     * @param whenNegative the formatter to use when the amount is negative
     * @return this for chaining, never null
     */
    public MoneyFormatterBuilder appendSigned(MoneyFormatter whenPositiveOrZero, MoneyFormatter whenNegative) {
        // Delegates to the three-formatter overload, reusing the
        // positive-or-zero formatter for the dedicated zero case.
        return appendSigned(whenPositiveOrZero, whenPositiveOrZero, whenNegative);
    }
}
public class ReverseBinaryEncoder { /** * Copies the current contents of the Ion binary - encoded byte array into a * new byte array . The allocates an array of the size needed to exactly hold * the output and copies the entire byte array to it . * This makes an unchecked assumption that { { @ link # serialize ( IonDatagram ) } * is already called . * @ return the newly allocated byte array */ byte [ ] toNewByteArray ( ) { } }
int length = myBuffer . length - myOffset ; byte [ ] bytes = new byte [ length ] ; System . arraycopy ( myBuffer , myOffset , bytes , 0 , length ) ; return bytes ;
public class MMapGraphStructure { /** * Add a new edge into the graph . * @ param e the edge that will be added into the graph . */ @ Override public void addEdge ( Edge e ) { } }
addDirectionalEdge ( e ) ; if ( e . isBidirectional ( ) ) addDirectionalEdge ( new Edge ( e . getToNodeId ( ) , e . getFromNodeId ( ) , e . getWeight ( ) ) ) ;
public class ListViewActivity { /** * Initialize RendererAdapter */ private void initAdapter ( ) { } }
RandomVideoCollectionGenerator randomVideoCollectionGenerator = new RandomVideoCollectionGenerator ( ) ; AdapteeCollection < Video > videoCollection = randomVideoCollectionGenerator . generateListAdapteeVideoCollection ( VIDEO_COUNT ) ; adapter = new RendererAdapter < Video > ( new VideoRendererBuilder ( ) , videoCollection ) ;
public class OpenAPIModelFilterAdapter {
    /**
     * {@inheritDoc}
     * Forwards the visit to the wrapped visitor and returns the request body
     * unchanged (the visitor's own return value, if any, is discarded).
     */
    @Override
    public RequestBody visitRequestBody(Context context, String key, RequestBody rb) {
        visitor.visitRequestBody(context, key, rb);
        return rb;
    }
}
public class ConfigParseOptions { /** * Set the file format . If set to null , assume { @ link ConfigSyntax # CONF } . * @ param filename * a configuration file name * @ return options with the syntax set */ public ConfigParseOptions setSyntaxFromFilename ( String filename ) { } }
ConfigSyntax syntax = ConfigImplUtil . syntaxFromExtension ( filename ) ; return setSyntax ( syntax ) ;
public class MtasSpanIntersectingQuery {
    /**
     * (non-Javadoc)
     *
     * Rewrites both clause queries. If either changed, a fresh intersecting
     * query is built and rewritten again; if both clauses rewrote to the same
     * query, the intersection is that query itself; if either clause can only
     * match zero-width spans, the whole intersection can never match.
     *
     * @see mtas.search.spans.util.MtasSpanQuery#rewrite(org.apache.lucene.index.IndexReader)
     */
    @Override
    public MtasSpanQuery rewrite(IndexReader reader) throws IOException {
        MtasSpanQuery newQ1 = (MtasSpanQuery) q1.rewrite(reader);
        MtasSpanQuery newQ2 = (MtasSpanQuery) q2.rewrite(reader);
        if (!newQ1.equals(q1) || !newQ2.equals(q2)) {
            // At least one clause changed: rebuild and rewrite to a fixpoint.
            return new MtasSpanIntersectingQuery(newQ1, newQ2).rewrite(reader);
        } else if (newQ1.equals(newQ2)) {
            // Intersecting a query with itself is the query itself.
            return newQ1;
        } else {
            // A clause with maximum width 0 cannot contribute any spans, so the
            // intersection matches nothing.
            boolean returnNone;
            returnNone = newQ1.getMaximumWidth() != null && newQ1.getMaximumWidth() == 0;
            returnNone |= newQ2.getMaximumWidth() != null && newQ2.getMaximumWidth() == 0;
            if (returnNone) {
                return new MtasSpanMatchNoneQuery(this.getField());
            } else {
                return super.rewrite(reader);
            }
        }
    }
}
public class MessageCreators {
    /**
     * Adds a new message creator POJO instance, created reflectively from the
     * given fully qualified class name.
     *
     * @param type fully qualified class name of the message creator
     * @throws CitrusRuntimeException if the class cannot be found, accessed or
     *         instantiated
     */
    public void addType(String type) {
        try {
            // NOTE(review): Class.newInstance() is deprecated since Java 9 —
            // consider getDeclaredConstructor().newInstance() (a public no-arg
            // constructor is required either way).
            messageCreators.add(Class.forName(type).newInstance());
        } catch (ClassNotFoundException | IllegalAccessException e) {
            throw new CitrusRuntimeException("Unable to access message creator type: " + type, e);
        } catch (InstantiationException e) {
            throw new CitrusRuntimeException("Unable to create message creator instance of type: " + type, e);
        }
    }
}
public class V1LoggersModel {
    /**
     * {@inheritDoc}
     * Registers the logger both as a child of this model and in the local list.
     */
    @Override
    public LoggersModel addLogger(LoggerModel logger) {
        addChildModel(logger);
        _loggers.add(logger);
        // Return this so calls can be chained.
        return this;
    }
}
public class AbstractProcessor { /** * Determines the identification of a command line processor by capture the * first line of its output for a specific command . * @ param command * array of command line arguments starting with executable * name . For example , { " cl " } * @ param fallback * start of identifier if there is an error in executing the * command * @ return identifier for the processor */ protected static String getIdentifier ( final String [ ] command , final String fallback ) { } }
String identifier = fallback ; try { final String [ ] cmdout = CaptureStreamHandler . run ( command ) ; if ( cmdout . length > 0 ) { identifier = cmdout [ 0 ] ; } } catch ( final Throwable ex ) { identifier = fallback + ":" + ex . toString ( ) ; } return identifier ;
public class AmazonEC2Client {
    /**
     * Describes available services to which you can create a VPC endpoint.
     *
     * @param request Contains the parameters for DescribeVpcEndpointServices.
     * @return Result of the DescribeVpcEndpointServices operation returned by the service.
     * @sample AmazonEC2.DescribeVpcEndpointServices
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeVpcEndpointServices"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public DescribeVpcEndpointServicesResult describeVpcEndpointServices(DescribeVpcEndpointServicesRequest request) {
        // Standard generated-client pattern: run pre-execution handlers first,
        // then delegate to the actual operation implementation.
        request = beforeClientExecution(request);
        return executeDescribeVpcEndpointServices(request);
    }
}
public class BlockingClient {
    /**
     * Closes the connection to the server, triggering the
     * {@link StreamConnection#connectionClosed()} event on the network-handling
     * thread where all callbacks occur.
     */
    @Override
    public void closeConnection() {
        // Closes the channel, triggering an exception in the network-handling
        // thread which in turn triggers connectionClosed(). The flag is set
        // BEFORE closing so that thread can distinguish an intentional close
        // from a network failure.
        try {
            vCloseRequested = true;
            socket.close();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}
public class ConvertBufferedImage {
    /**
     * Copies the original image into the output image. If it can't do a copy a
     * new image is created and returned.
     *
     * @param original Original image
     * @param output (Optional) Storage for copy.
     * @return The copied image. May be a new instance
     */
    public static BufferedImage checkCopy(BufferedImage original, BufferedImage output) {
        ColorModel cm = original.getColorModel();
        boolean isAlphaPremultiplied = cm.isAlphaPremultiplied();
        // Reuse 'output' only when it matches the original in width, height and
        // image type; otherwise clone the raster into a brand-new compatible image.
        if (output == null || original.getWidth() != output.getWidth() || original.getHeight() != output.getHeight()
                || original.getType() != output.getType()) {
            WritableRaster raster = original.copyData(original.getRaster().createCompatibleWritableRaster());
            return new BufferedImage(cm, raster, isAlphaPremultiplied, null);
        }
        // Compatible target: copy pixel data in place.
        original.copyData(output.getRaster());
        return output;
    }
}
public class TimelineModel { /** * Updates all given events in the model with UI update . * @ param events collection of events to be updated * @ param timelineUpdater TimelineUpdater instance to update the events in UI */ public void updateAll ( Collection < TimelineEvent > events , TimelineUpdater timelineUpdater ) { } }
if ( events != null && ! events . isEmpty ( ) ) { for ( TimelineEvent event : events ) { update ( event , timelineUpdater ) ; } }
public class MappingServiceController {
    /**
     * Schedules a {@link MappingJobExecution}.
     *
     * @param mappingProjectId ID of the mapping project
     * @param targetEntityTypeId ID of the target entity to create or update
     * @param label label of the target entity to create
     * @param rawPackageId ID of the package to put the newly created entity in
     * @return the href of the created MappingJobExecution
     */
    @PostMapping(value = "/map", produces = TEXT_PLAIN_VALUE)
    public ResponseEntity<String> scheduleMappingJob(@RequestParam String mappingProjectId,
            @RequestParam String targetEntityTypeId, @RequestParam(required = false) String label,
            @RequestParam(required = false, name = "package") String rawPackageId,
            @RequestParam(required = false) Boolean addSourceAttribute) throws URISyntaxException {
        // Normalize user-supplied identifiers before validation.
        mappingProjectId = mappingProjectId.trim();
        targetEntityTypeId = targetEntityTypeId.trim();
        label = trim(label);
        String packageId = trim(rawPackageId);
        try {
            validateEntityName(targetEntityTypeId);
            if (mappingService.getMappingProject(mappingProjectId) == null) {
                throw new MolgenisDataException("No mapping project found with ID " + mappingProjectId);
            }
            // Package is optional; when given it must exist and must not be a
            // system package.
            if (packageId != null) {
                Package aPackage = dataService.getMeta().getPackage(packageId)
                        .orElseThrow(() -> new MolgenisDataException("No package found with ID " + packageId));
                if (isSystemPackage(aPackage)) {
                    throw new MolgenisDataException(format("Package [{0}] is a system package.", packageId));
                }
            }
        } catch (MolgenisDataException mde) {
            // Validation failures become 400 responses with a plain-text message.
            return ResponseEntity.badRequest().contentType(TEXT_PLAIN).body(mde.getMessage());
        }
        String jobHref = submitMappingJob(mappingProjectId, targetEntityTypeId, addSourceAttribute, packageId, label)
                .getBody();
        // 201 Created with the job href both as Location header and as the body.
        return created(new URI(jobHref)).contentType(TEXT_PLAIN).body(jobHref);
    }
}
public class JobClient {
    /**
     * Display the stats of the cluster with per-tracker details.
     *
     * @throws IOException
     */
    private void listTrackers() throws IOException {
        // Detailed status: includes per-tracker task information.
        ClusterStatus fullStatus = jobSubmitClient.getClusterStatus(true);
        Collection<TaskTrackerStatus> trackers = fullStatus.getTaskTrackersDetails();
        Set<String> activeTrackers = new HashSet<String>(fullStatus.getActiveTrackerNames());
        // Per-tracker progress lists; cleared at the end of each tracker iteration.
        List<Float> mapsProgress = new ArrayList<Float>();
        List<Float> reducesProgress = new ArrayList<Float>();
        // NOTE(review): these counters are never reset inside the loop, so the
        // per-tracker "Finished ... From Running Jobs" lines print cumulative
        // totals across trackers — confirm that is intended.
        int finishedMapsFromRunningJobs = 0;
        int finishedReducesFromRunningJobs = 0;
        System.out.println("Total Map Tasks in Running Jobs: " + fullStatus.getTotalMapTasks());
        System.out.println("Total Reduce Tasks in Running Jobs: " + fullStatus.getTotalReduceTasks());
        for (TaskTrackerStatus tracker : trackers) {
            System.out.println(tracker.getTrackerName());
            // List<TaskStatus> tasks = tracker.getTaskReports();
            Collection<TaskStatus> tasks = fullStatus.getTaskTrackerTasksStatuses(tracker.getTrackerName());
            // Bucket tasks: running/unassigned contribute progress values,
            // succeeded tasks bump the finished counters; map vs reduce split.
            for (TaskStatus task : tasks) {
                TaskStatus.State state = task.getRunState();
                if (task.getIsMap() && (state == TaskStatus.State.RUNNING || state == TaskStatus.State.UNASSIGNED)) {
                    mapsProgress.add(task.getProgress());
                } else if (!task.getIsMap()
                        && (state == TaskStatus.State.RUNNING || state == TaskStatus.State.UNASSIGNED)) {
                    reducesProgress.add(task.getProgress());
                } else if (task.getIsMap() && state == TaskStatus.State.SUCCEEDED) {
                    finishedMapsFromRunningJobs++;
                } else if (!task.getIsMap() && state == TaskStatus.State.SUCCEEDED) {
                    finishedReducesFromRunningJobs++;
                }
            }
            // A tracker not in the active set is reported as blacklisted.
            if (activeTrackers.contains(tracker.getTrackerName())) {
                System.out.println("\tActive");
            } else {
                System.out.println("\tBlacklisted");
            }
            System.out.println("\tLast Seen: " + tracker.getLastSeen());
            System.out.println("\tMap Tasks Running: " + tracker.countMapTasks() + "/" + tracker.getMaxMapSlots());
            System.out.println("\tMap Tasks Progress: " + mapsProgress.toString());
            System.out.println("\tFinished Map Tasks From Running Jobs: " + finishedMapsFromRunningJobs);
            System.out.println("\tReduce Tasks Running: " + tracker.countReduceTasks() + "/" + tracker.getMaxReduceSlots());
            System.out.println("\tReduce Tasks Progress: " + reducesProgress.toString());
            System.out.println("\tTask Tracker Failures: " + tracker.getFailures());
            // Reset the per-tracker progress lists for the next iteration.
            mapsProgress.clear();
            reducesProgress.clear();
        }
    }
}
public class GA4GHPicardRunner { /** * Starts the Picard tool process based on constructed command . * @ throws IOException */ private void startProcess ( ) throws IOException { } }
LOG . info ( "Building process" ) ; ProcessBuilder processBuilder = new ProcessBuilder ( command ) ; processBuilder . redirectError ( ProcessBuilder . Redirect . INHERIT ) ; processBuilder . redirectOutput ( ProcessBuilder . Redirect . INHERIT ) ; LOG . info ( "Starting process" ) ; process = processBuilder . start ( ) ; LOG . info ( "Process started" ) ;
public class CookieApplication {
    /**
     * Creates a context for the templates.
     *
     * @param request The user's http request
     * @param response The user's http response
     * @return the context for the templates
     */
    public Object createContext(ApplicationRequest request, ApplicationResponse response) {
        // Bundle the request/response with this application's configured cookie
        // attributes (domain, path, secure flag).
        return new CookieContext(request, response, mDomain, mPath, mIsSecure);
    }
}
public class RowKey { /** * Extracts the name of the metric ID contained in a row key . * @ param tsdb The TSDB to use . * @ param row The actual row key . * @ return A deferred to wait on that will return the name of the metric . * @ throws IllegalArgumentException if the row key is too short due to missing * salt or metric or if it ' s null / empty . * @ throws NoSuchUniqueId if the UID could not resolve to a string * @ since 1.2 */ public static Deferred < String > metricNameAsync ( final TSDB tsdb , final byte [ ] row ) { } }
if ( row == null || row . length < 1 ) { throw new IllegalArgumentException ( "Row key cannot be null or empty" ) ; } if ( row . length < Const . SALT_WIDTH ( ) + tsdb . metrics . width ( ) ) { throw new IllegalArgumentException ( "Row key is too short" ) ; } final byte [ ] id = Arrays . copyOfRange ( row , Const . SALT_WIDTH ( ) , tsdb . metrics . width ( ) + Const . SALT_WIDTH ( ) ) ; return tsdb . metrics . getNameAsync ( id ) ;
public class JmsEventTransportImpl { /** * Initialise the thread pool to have the requested number of threads available , life span of threads ( set to 0 ) not used as threads will never be eligible to die as coreSize = maxSize */ public void initThreadPool ( ) { } }
CustomizableThreadFactory ctf = new CustomizableThreadFactory ( ) ; ctf . setDaemon ( true ) ; ctf . setThreadNamePrefix ( getTransportName ( ) + "-Publisher-" ) ; threadPool = new JMXReportingThreadPoolExecutor ( threadPoolSize , threadPoolSize , 0 , TimeUnit . SECONDS , new LinkedBlockingQueue < Runnable > ( ) , ctf ) ;
public class WebFragmentTypeImpl { /** * Returns all < code > data - source < / code > elements * @ return list of < code > data - source < / code > */ public List < DataSourceType < WebFragmentType < T > > > getAllDataSource ( ) { } }
List < DataSourceType < WebFragmentType < T > > > list = new ArrayList < DataSourceType < WebFragmentType < T > > > ( ) ; List < Node > nodeList = childNode . get ( "data-source" ) ; for ( Node node : nodeList ) { DataSourceType < WebFragmentType < T > > type = new DataSourceTypeImpl < WebFragmentType < T > > ( this , "data-source" , childNode , node ) ; list . add ( type ) ; } return list ;
public class ExpandableExtension {
    /**
     * Opens the expandable item at the given position.
     *
     * @param position the global position
     * @param notifyItemChanged true if we need to call notifyItemChanged. DEFAULT: false
     */
    public void expand(int position, boolean notifyItemChanged) {
        Item item = mFastAdapter.getItem(position);
        // NOTE(review): the null check is redundant — 'instanceof' is false for
        // null — but harmless.
        if (item != null && item instanceof IExpandable) {
            IExpandable expandable = (IExpandable) item;
            // if this item is not already expanded and has sub items we go on
            if (!expandable.isExpanded() && expandable.getSubItems() != null && expandable.getSubItems().size() > 0) {
                IAdapter<Item> adapter = mFastAdapter.getAdapter(position);
                if (adapter != null && adapter instanceof IItemAdapter) {
                    // Insert the sub items directly below the parent item.
                    ((IItemAdapter<?, Item>) adapter).addInternal(position + 1, expandable.getSubItems());
                }
                // remember that this item is now opened (not collapsed)
                expandable.withIsExpanded(true);
                // we need to notify to get the correct drawable if there is one showing the current state
                if (notifyItemChanged) {
                    mFastAdapter.notifyItemChanged(position);
                }
            }
        }
    }
}
public class Monitoring {
    /**
     * <pre>
     * Monitoring configurations for sending metrics to the producer project.
     * There can be multiple producer destinations. A monitored resource type may
     * appear in multiple monitoring destinations if different aggregations are
     * needed for different sets of metrics associated with that monitored
     * resource type. A monitored resource and metric pair may only be used once
     * in the Monitoring configuration.
     * </pre>
     *
     * <code>repeated .google.api.Monitoring.MonitoringDestination producer_destinations = 1;</code>
     */
    public java.util.List<com.google.api.Monitoring.MonitoringDestination> getProducerDestinationsList() {
        // Generated protobuf accessor: returns the internal list field directly,
        // without copying.
        return producerDestinations_;
    }
}
public class SingletonCacheWriter {
    /**
     * Called when the SingletonStore discovers that the cache has become the
     * coordinator and push in memory state has been enabled. It might not
     * actually push the state if there's an ongoing push task running, in which
     * case it will wait for that push task to finish.
     *
     * @throws PushStateException if a newly started push does not complete
     *         within the configured timeout
     */
    private void doPushState() throws PushStateException {
        if (pushStateFuture == null || pushStateFuture.isDone()) {
            // No push in flight: submit a new push task and wait (bounded by the
            // configured timeout) for it to complete.
            Callable<?> task = createPushStateTask();
            pushStateFuture = executor.submit(task);
            try {
                waitForTaskToFinish(pushStateFuture, singletonConfiguration.pushStateTimeout(), TimeUnit.MILLISECONDS);
            } catch (Exception e) {
                throw new PushStateException("unable to complete in memory state push to cache loader", e);
            }
        } else {
            /* at the most, we wait for push state timeout value. if it push task finishes earlier, this call
             * will stop when the push task finishes, otherwise a timeout exception will be reported */
            awaitForPushToFinish(pushStateFuture, singletonConfiguration.pushStateTimeout(), TimeUnit.MILLISECONDS);
        }
    }
}
public class AppsImpl { /** * Returns the available endpoint deployment regions and URLs . * @ param appId The application ID . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the Map & lt ; String , String & gt ; object */ public Observable < Map < String , String > > listEndpointsAsync ( UUID appId ) { } }
return listEndpointsWithServiceResponseAsync ( appId ) . map ( new Func1 < ServiceResponse < Map < String , String > > , Map < String , String > > ( ) { @ Override public Map < String , String > call ( ServiceResponse < Map < String , String > > response ) { return response . body ( ) ; } } ) ;
public class KXmlParser {
    /**
     * Read an element content spec. This is a regular-expression-like pattern
     * of names or other content specs. The following operators are supported:
     *   sequence:    (a,b,c)
     *   choice:      (a|b|c)
     *   optional:    a?
     *   one or more: a+
     *   any number:  a*
     * The special name '#PCDATA' is permitted but only if it is the first
     * element of the first group: (#PCDATA|a|b). The top-level element must be
     * either a choice, a sequence, or one of the special names EMPTY and ANY.
     */
    private void readContentSpec() throws IOException, XmlPullParserException {
        // this implementation is very lenient; it scans for balanced parens only
        skip();
        int c = peekCharacter();
        if (c == '(') {
            // Grouped spec: consume characters until the parens balance out,
            // failing on end-of-input.
            int depth = 0;
            do {
                if (c == '(') {
                    depth++;
                } else if (c == ')') {
                    depth--;
                } else if (c == -1) {
                    throw new XmlPullParserException("Unterminated element content spec", this, null);
                }
                position++;
                c = peekCharacter();
            } while (depth > 0);
            // Optional cardinality suffix after the closing paren.
            if (c == '*' || c == '?' || c == '+') {
                position++;
            }
        } else if (c == EMPTY[0]) {
            read(EMPTY);
        } else if (c == ANY[0]) {
            read(ANY);
        } else {
            throw new XmlPullParserException("Expected element content spec", this, null);
        }
    }
}
public class TraceOptions { /** * Returns a { @ code TraceOption } built from a lowercase base16 representation . * @ param src the lowercase base16 representation . * @ param srcOffset the offset in the buffer where the representation of the { @ code TraceOptions } * begins . * @ return a { @ code TraceOption } built from a lowercase base16 representation . * @ throws NullPointerException if { @ code src } is null . * @ throws IllegalArgumentException if { @ code src . length } is not { @ code 2 * TraceOption . SIZE } OR * if the { @ code str } has invalid characters . * @ since 0.18 */ public static TraceOptions fromLowerBase16 ( CharSequence src , int srcOffset ) { } }
return new TraceOptions ( BigendianEncoding . byteFromBase16String ( src , srcOffset ) ) ;
public class EphemeralKey { /** * Creates an ephemeral API key for a given resource . * @ param params request parameters * @ param options request options . { @ code stripeVersion } is required when creating ephemeral * keys . It must have a non - null { @ link RequestOptions # getStripeVersionOverride ( ) } . * @ return the new ephemeral key */ public static EphemeralKey create ( Map < String , Object > params , RequestOptions options ) throws StripeException { } }
// Ephemeral keys are pinned to the mobile client's API version, so the
// request options must carry an explicit Stripe version override.
if (options.getStripeVersionOverride() == null) {
    throw new IllegalArgumentException("`stripeVersionOverride` must be specified in "
            + "RequestOptions with stripe version of your mobile client.");
}
String url = classUrl(EphemeralKey.class);
return request(RequestMethod.POST, url, params, EphemeralKey.class, options);
public class LibraryUtils { /** * A wrapper function of handling exceptions that have a known root cause , such as { @ link AmazonServiceException } . * @ param exceptionHandler the { @ link ExceptionHandler } to handle exceptions . * @ param progressStatus the current progress status { @ link ProgressStatus } . * @ param e the exception needs to be handled . * @ param message the exception message . */ public static void handleException ( ExceptionHandler exceptionHandler , ProgressStatus progressStatus , Exception e , String message ) { } }
ProcessingLibraryException exception = new ProcessingLibraryException ( message , e , progressStatus ) ; exceptionHandler . handleException ( exception ) ;
public class SslPolicy { /** * The ciphers . * @ param ciphers * The ciphers . */ public void setCiphers ( java . util . Collection < Cipher > ciphers ) { } }
if ( ciphers == null ) { this . ciphers = null ; return ; } this . ciphers = new java . util . ArrayList < Cipher > ( ciphers ) ;