| signature (string, length 43–39.1k) | implementation (string, length 0–450k) |
|---|---|
public class KafkaUtils {

    /**
     * Computes the 32-bit murmur2 hash of the given byte array, using the
     * seed and mixing constants used by Kafka's default partitioner.
     *
     * @param data byte array to hash
     * @return 32-bit murmur2 hash of the given array
     */
    @SuppressWarnings("fallthrough")
    public static int murmur2(final byte[] data) {
        final int length = data.length;
        final int seed = 0x9747b28c;
        // 'm' and 'r' are mixing constants generated offline.
        // They're not really "magic", they just happen to work well.
        final int m = 0x5bd1e995;
        final int r = 24;

        // Initialize the hash to a value derived from the seed and length.
        int h = seed ^ length;

        // Mix four input bytes at a time into the hash.
        final int fullWords = length / 4;
        for (int word = 0; word < fullWords; word++) {
            final int base = word * 4;
            // Assemble a little-endian 32-bit word; the masked terms occupy
            // disjoint bit ranges, so OR is equivalent to the original additions.
            int k = (data[base] & 0xff)
                    | ((data[base + 1] & 0xff) << 8)
                    | ((data[base + 2] & 0xff) << 16)
                    | ((data[base + 3] & 0xff) << 24);
            k *= m;
            k ^= k >>> r;
            k *= m;
            h *= m;
            h ^= k;
        }

        // Fold in the trailing 1-3 bytes; the cases intentionally fall through.
        final int tail = length & ~3;
        switch (length % 4) {
            case 3:
                h ^= (data[tail + 2] & 0xff) << 16;
            case 2:
                h ^= (data[tail + 1] & 0xff) << 8;
            case 1:
                h ^= data[tail] & 0xff;
                h *= m;
            default:
        }

        // Final avalanche of the last few bits.
        h ^= h >>> 13;
        h *= m;
        h ^= h >>> 15;
        return h;
    }
}
public class HttpClient { /** * Apply cookies configuration emitted by the returned Mono before requesting .
* @ param cookieBuilder the cookies { @ link Function } to invoke before sending
* @ return a new { @ link HttpClient } */
public final HttpClient cookiesWhen ( String name , Function < ? super Cookie , Mono < ? extends Cookie > > cookieBuilder ) { } } | return new HttpClientCookieWhen ( this , name , cookieBuilder ) ; |
public class AmazonLightsailClient { /** * Creates a snapshot of your database in Amazon Lightsail . You can use snapshots for backups , to make copies of a
* database , and to save data before deleting a database .
* The < code > create relational database snapshot < / code > operation supports tag - based access control via request
* tags . For more information , see the < a
* href = " https : / / lightsail . aws . amazon . com / ls / docs / en / articles / amazon - lightsail - controlling - access - using - tags "
* > Lightsail Dev Guide < / a > .
* @ param createRelationalDatabaseSnapshotRequest
* @ return Result of the CreateRelationalDatabaseSnapshot operation returned by the service .
* @ throws ServiceException
* A general service exception .
* @ throws InvalidInputException
* Lightsail throws this exception when user input does not conform to the validation rules of an input
* field . < / p > < note >
* Domain - related APIs are only available in the N . Virginia ( us - east - 1 ) Region . Please set your AWS Region
* configuration to us - east - 1 to create , view , or edit these resources .
* @ throws NotFoundException
* Lightsail throws this exception when it cannot find a resource .
* @ throws OperationFailureException
* Lightsail throws this exception when an operation fails to execute .
* @ throws AccessDeniedException
* Lightsail throws this exception when the user cannot be authenticated or uses invalid credentials to
* access a resource .
* @ throws AccountSetupInProgressException
* Lightsail throws this exception when an account is still in the setup in progress state .
* @ throws UnauthenticatedException
* Lightsail throws this exception when the user has not been authenticated .
* @ sample AmazonLightsail . CreateRelationalDatabaseSnapshot
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / lightsail - 2016-11-28 / CreateRelationalDatabaseSnapshot "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public CreateRelationalDatabaseSnapshotResult createRelationalDatabaseSnapshot ( CreateRelationalDatabaseSnapshotRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeCreateRelationalDatabaseSnapshot ( request ) ; |
public class XmlValidationUtils { /** * Checks if given element node is either on ignore list or
* contains @ ignore @ tag inside control message
* @ param source
* @ param received
* @ param ignoreExpressions
* @ param namespaceContext
* @ return */
public static boolean isElementIgnored ( Node source , Node received , Set < String > ignoreExpressions , NamespaceContext namespaceContext ) { } } | if ( isElementIgnored ( received , ignoreExpressions , namespaceContext ) ) { if ( log . isDebugEnabled ( ) ) { log . debug ( "Element: '" + received . getLocalName ( ) + "' is on ignore list - skipped validation" ) ; } return true ; } else if ( source . getFirstChild ( ) != null && StringUtils . hasText ( source . getFirstChild ( ) . getNodeValue ( ) ) && source . getFirstChild ( ) . getNodeValue ( ) . trim ( ) . equals ( Citrus . IGNORE_PLACEHOLDER ) ) { if ( log . isDebugEnabled ( ) ) { log . debug ( "Element: '" + received . getLocalName ( ) + "' is ignored by placeholder '" + Citrus . IGNORE_PLACEHOLDER + "'" ) ; } return true ; } return false ; |
public class TrafficPlugin { /** * Add primary layer to the map . */
private void addPrimaryLayer ( ) { } } | LineLayer primary = TrafficLayer . getLineLayer ( Primary . BASE_LAYER_ID , Primary . ZOOM_LEVEL , Primary . FILTER , Primary . FUNCTION_LINE_COLOR , Primary . FUNCTION_LINE_WIDTH , Primary . FUNCTION_LINE_OFFSET ) ; LineLayer primaryCase = TrafficLayer . getLineLayer ( Primary . CASE_LAYER_ID , Primary . ZOOM_LEVEL , Primary . FILTER , Primary . FUNCTION_LINE_COLOR_CASE , Primary . FUNCTION_LINE_WIDTH_CASE , Primary . FUNCTION_LINE_OFFSET , Primary . FUNCTION_LINE_OPACITY_CASE ) ; addTrafficLayersToMap ( primaryCase , primary , getLastAddedLayerId ( ) ) ; |
public class PrintStream { /** * Flushes the stream and checks its error state . The internal error state
* is set to < code > true < / code > when the underlying output stream throws an
* < code > IOException < / code > other than < code > InterruptedIOException < / code > ,
* and when the < code > setError < / code > method is invoked . If an operation
* on the underlying output stream throws an
* < code > InterruptedIOException < / code > , then the < code > PrintStream < / code >
* converts the exception back into an interrupt by doing :
* < pre >
* Thread . currentThread ( ) . interrupt ( ) ;
* < / pre >
* or the equivalent .
* @ return < code > true < / code > if and only if this stream has encountered an
* < code > IOException < / code > other than
* < code > InterruptedIOException < / code > , or the
* < code > setError < / code > method has been invoked */
public boolean checkError ( ) { } } | if ( out != null ) flush ( ) ; if ( out instanceof java . io . PrintStream ) { PrintStream ps = ( PrintStream ) out ; return ps . checkError ( ) ; } return trouble ; |
public class NumberHelper {

    /**
     * Utility method to convert a String to an Integer, treating null and empty
     * strings as null.
     *
     * @param value string representation of an integer, may be null or empty
     * @return the parsed Integer, or null when the input is null or empty
     * @throws NumberFormatException if the string is non-empty but not a valid integer
     */
    public static final Integer parseInteger(String value) {
        if (value == null || value.isEmpty()) {
            return null;
        }
        // Integer.valueOf(String) parses and boxes in one step (and may reuse
        // cached instances), replacing the redundant valueOf(parseInt(...)).
        return Integer.valueOf(value);
    }
}
public class EddystoneUID { /** * Get the 10 - byte namespace ID as an upper - case hex string .
* @ return
* The namespace ID . */
public String getNamespaceIdAsString ( ) { } } | if ( mNamespaceIdAsString == null ) { mNamespaceIdAsString = Bytes . toHexString ( getNamespaceId ( ) , true ) ; } return mNamespaceIdAsString ; |
public class ProtoParser { /** * Reads a service declaration and returns it . */
private ServiceElement readService ( String documentation ) { } } | String name = readName ( ) ; ServiceElement . Builder builder = ServiceElement . builder ( ) . name ( name ) . qualifiedName ( prefix + name ) . documentation ( documentation ) ; if ( readChar ( ) != '{' ) throw unexpected ( "expected '{'" ) ; while ( true ) { String rpcDocumentation = readDocumentation ( ) ; if ( peekChar ( ) == '}' ) { pos ++ ; break ; } Object declared = readDeclaration ( rpcDocumentation , Context . SERVICE ) ; if ( declared instanceof RpcElement ) { builder . addRpc ( ( RpcElement ) declared ) ; } else if ( declared instanceof OptionElement ) { builder . addOption ( ( OptionElement ) declared ) ; } } return builder . build ( ) ; |
public class QueryRunner { /** * Executes the given INSERT , UPDATE , or DELETE SQL statement . The
* < code > Connection < / code > is retrieved from the < code > DataSource < / code >
* set in the constructor . This < code > Connection < / code > must be in
* auto - commit mode or the update will not be saved .
* @ param sql The SQL statement to execute .
* @ param params Initializes the PreparedStatement ' s IN ( i . e . ' ? ' )
* parameters .
* @ throws java . sql . SQLException if a database access error occurs
* @ return The number of rows updated . */
public int update ( String sql , Object [ ] params ) throws SQLException { } } | Connection conn = this . prepareConnection ( ) ; try { return this . update ( conn , sql , params ) ; } finally { close ( conn ) ; } |
public class JmsJMSContextImpl { /** * ( non - Javadoc )
* @ see javax . jms . JMSContext # createTopic ( java . lang . String ) */
@ Override public Topic createTopic ( String topicName ) throws JMSRuntimeException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "createTopic" , new Object [ ] { topicName } ) ; Topic topic = null ; try { topic = jmsSession . createTopic ( topicName ) ; } catch ( JMSException jmse ) { throw ( JMSRuntimeException ) JmsErrorUtils . getJMS2Exception ( jmse , JMSRuntimeException . class ) ; } finally { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "createTopic" , new Object [ ] { topic } ) ; } return topic ; |
public class AbstractProxyIoHandler { /** * Hooked session opened event .
* @ param session the io session */
@ Override public final void sessionOpened ( IoSession session ) throws Exception { } } | ProxyIoSession proxyIoSession = ( ProxyIoSession ) session . getAttribute ( ProxyIoSession . PROXY_SESSION ) ; if ( proxyIoSession . getRequest ( ) instanceof SocksProxyRequest || proxyIoSession . isAuthenticationFailed ( ) || proxyIoSession . getHandler ( ) . isHandshakeComplete ( ) ) { proxySessionOpened ( session ) ; } else { logger . debug ( "Filtered session opened event !" ) ; } |
public class KestrelSpout { /** * メッセージの取得失敗したクライアントに除外設定を行う 。
* @ param info クライアント接続情報
* @ param t 発生例外 */
private void blacklist ( KestrelClientInfo info , Throwable t ) { } } | logger . warn ( "Failed to read from Kestrel at " + info . host + ":" + info . port , t ) ; // this case can happen when it fails to connect to Kestrel ( and so never stores the connection )
info . closeClient ( ) ; info . blacklistTillTimeMs = System . currentTimeMillis ( ) + DEFAULT_BLACKLIST_TIME_MS ; int index = this . clientInfoList . indexOf ( info ) ; // we just closed the connection , so all open reliable reads will be aborted . empty buffers .
for ( Iterator < EmitItem > i = this . emitBuffer . iterator ( ) ; i . hasNext ( ) ; ) { EmitItem item = i . next ( ) ; if ( item . getSourceId ( ) . getIndex ( ) == index ) { i . remove ( ) ; } } |
public class BatchGetDeploymentGroupsResult { /** * Information about the deployment groups .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setDeploymentGroupsInfo ( java . util . Collection ) } or { @ link # withDeploymentGroupsInfo ( java . util . Collection ) }
* if you want to override the existing values .
* @ param deploymentGroupsInfo
* Information about the deployment groups .
* @ return Returns a reference to this object so that method calls can be chained together . */
public BatchGetDeploymentGroupsResult withDeploymentGroupsInfo ( DeploymentGroupInfo ... deploymentGroupsInfo ) { } } | if ( this . deploymentGroupsInfo == null ) { setDeploymentGroupsInfo ( new com . amazonaws . internal . SdkInternalList < DeploymentGroupInfo > ( deploymentGroupsInfo . length ) ) ; } for ( DeploymentGroupInfo ele : deploymentGroupsInfo ) { this . deploymentGroupsInfo . add ( ele ) ; } return this ; |
public class DatabasesInner { /** * Remove Database principals permissions .
* @ param resourceGroupName The name of the resource group containing the Kusto cluster .
* @ param clusterName The name of the Kusto cluster .
* @ param databaseName The name of the database in the Kusto cluster .
* @ param value The list of Kusto database principals .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the DatabasePrincipalListResultInner object if successful . */
public DatabasePrincipalListResultInner removePrincipals ( String resourceGroupName , String clusterName , String databaseName , List < DatabasePrincipalInner > value ) { } } | return removePrincipalsWithServiceResponseAsync ( resourceGroupName , clusterName , databaseName , value ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class ElemTemplateElement { /** * Replace the old child with a new child .
* @ param newChild New child to replace with
* @ param oldChild Old child to be replaced
* @ return The new child
* @ throws DOMException */
public Node replaceChild ( Node newChild , Node oldChild ) throws DOMException { } } | if ( oldChild == null || oldChild . getParentNode ( ) != this ) return null ; ElemTemplateElement newChildElem = ( ( ElemTemplateElement ) newChild ) ; ElemTemplateElement oldChildElem = ( ( ElemTemplateElement ) oldChild ) ; // Fix up previous sibling .
ElemTemplateElement prev = ( ElemTemplateElement ) oldChildElem . getPreviousSibling ( ) ; if ( null != prev ) prev . m_nextSibling = newChildElem ; // Fix up parent ( this )
if ( m_firstChild == oldChildElem ) m_firstChild = newChildElem ; newChildElem . m_parentNode = this ; oldChildElem . m_parentNode = null ; newChildElem . m_nextSibling = oldChildElem . m_nextSibling ; oldChildElem . m_nextSibling = null ; // newChildElem . m _ stylesheet = oldChildElem . m _ stylesheet ;
// oldChildElem . m _ stylesheet = null ;
return newChildElem ; |
public class OggPacketWriter { /** * Sets the current granule position .
* The granule position will be applied to all
* un - flushed packets , and all future packets .
* As such , you should normally either call a flush
* just before or just after this call . */
public void setGranulePosition ( long position ) { } } | currentGranulePosition = position ; for ( OggPage p : buffer ) { p . setGranulePosition ( position ) ; } |
public class ManagementGraph { /** * Returns the output group vertex at the given index in the given stage .
* @ param stage
* the index to the management stage
* @ param index
* the index to the output group vertex
* @ return the output group vertex at the given index in the given stage or < code > null < / code > if either the stage
* does not exists or the given index is invalid in this stage */
public ManagementGroupVertex getOutputGroupVertex ( final int stage , final int index ) { } } | if ( stage >= this . stages . size ( ) ) { return null ; } return this . stages . get ( stage ) . getOutputGroupVertex ( index ) ; |
public class aaatacacsparams { /** * Use this API to update aaatacacsparams . */
public static base_response update ( nitro_service client , aaatacacsparams resource ) throws Exception { } } | aaatacacsparams updateresource = new aaatacacsparams ( ) ; updateresource . serverip = resource . serverip ; updateresource . serverport = resource . serverport ; updateresource . authtimeout = resource . authtimeout ; updateresource . tacacssecret = resource . tacacssecret ; updateresource . authorization = resource . authorization ; updateresource . accounting = resource . accounting ; updateresource . auditfailedcmds = resource . auditfailedcmds ; updateresource . defaultauthenticationgroup = resource . defaultauthenticationgroup ; return updateresource . update_resource ( client ) ; |
public class TopologyAssign { /** * get all task ids which are newly assigned or reassigned */
public static Set < Integer > getNewOrChangedTaskIds ( Set < ResourceWorkerSlot > oldWorkers , Set < ResourceWorkerSlot > workers ) { } } | Set < Integer > rtn = new HashSet < > ( ) ; HashMap < String , ResourceWorkerSlot > workerPortMap = HostPortToWorkerMap ( oldWorkers ) ; for ( ResourceWorkerSlot worker : workers ) { ResourceWorkerSlot oldWorker = workerPortMap . get ( worker . getHostPort ( ) ) ; if ( oldWorker != null ) { Set < Integer > oldTasks = oldWorker . getTasks ( ) ; for ( Integer task : worker . getTasks ( ) ) { if ( ! ( oldTasks . contains ( task ) ) ) rtn . add ( task ) ; } } else { if ( worker . getTasks ( ) != null ) { rtn . addAll ( worker . getTasks ( ) ) ; } } } return rtn ; |
public class ResourceBundlesHandlerImpl { /** * Checks if it is needed to search for variant in post process
* @ return true if it is needed to search for variant in post process */
private boolean isSearchingForVariantInPostProcessNeeded ( ) { } } | boolean needToSearch = false ; ResourceBundlePostProcessor [ ] postprocessors = new ResourceBundlePostProcessor [ ] { postProcessor , unitaryCompositePostProcessor , compositePostProcessor , unitaryCompositePostProcessor } ; for ( ResourceBundlePostProcessor resourceBundlePostProcessor : postprocessors ) { if ( resourceBundlePostProcessor != null && ( ( AbstractChainedResourceBundlePostProcessor ) resourceBundlePostProcessor ) . isVariantPostProcessor ( ) ) { needToSearch = true ; break ; } } return needToSearch ; |
public class InMemoryPushPushPipe { /** * Pushes a message out to all the PushableConsumers .
* @ param message
* the message to be pushed to consumers
* @ throws IOException
* In case IOException of some sort is occurred */
public void pushMessage ( IMessage message ) throws IOException { } } | if ( log . isDebugEnabled ( ) ) { log . debug ( "pushMessage: {} to {} consumers" , message , consumers . size ( ) ) ; } for ( IConsumer consumer : consumers ) { try { ( ( IPushableConsumer ) consumer ) . pushMessage ( this , message ) ; } catch ( Throwable t ) { if ( t instanceof IOException ) { throw ( IOException ) t ; } log . error ( "Exception pushing message to consumer" , t ) ; } } |
public class DimmedIcon { /** * documentation inherited from interface Icon */
public void paintIcon ( Component c , Graphics g , int x , int y ) { } } | Graphics2D gfx = ( Graphics2D ) g ; Composite ocomp = gfx . getComposite ( ) ; gfx . setComposite ( _alpha ) ; _icon . paintIcon ( c , gfx , x , y ) ; gfx . setComposite ( ocomp ) ; |
public class OmsTrentoP {

    /**
     * Verifies the validity of the input data; with OMS this observation is not
     * strictly necessary — see the variable declarations for the valid ranges.
     *
     * @throws IllegalArgumentException if a parameter does not satisfy the
     *         required conditions (in OMS3 this is done by the annotations)
     * @return true if there is the percentage area.
     */
    private boolean verifyParameter() {
        // checkNull(inPipes, pAccuracy);
        boolean isAreaAllDry;
        // Mode must be 0 (project) or 1 (calibration).
        if (pMode < 0 || pMode > 1) {
            pm.errorMessage(msg.message("trentoP.error.mode"));
            throw new IllegalArgumentException(msg.message("trentoP.error.mode"));
        }
        if (inPipes == null) {
            pm.errorMessage(msg.message("trentoP.error.inputMatrix") + " geometry file");
            throw new IllegalArgumentException(msg.message("trentoP.error.inputMatrix") + " geometry file");
        }
        /* The number of junctions in a node cannot exceed 7. */
        if (pMaxJunction <= 0 || pMaxJunction > 6) {
            pm.errorMessage(msg.message("trentoP.error.maxJunction"));
            throw new IllegalArgumentException();
        }
        /* The allowed number of iterations can be neither too small nor
         * excessively large. */
        if (pJMax < 3 || pJMax > 1000) {
            pm.errorMessage(msg.message("trentoP.error.jMax"));
            throw new IllegalArgumentException(msg.message("trentoP.error.jMax"));
        }
        /* The precision used when searching for some solutions cannot be
         * negative. */
        if (pAccuracy == null || pAccuracy <= 0) {
            pm.errorMessage(msg.message("trentoP.error.accuracy"));
            throw new IllegalArgumentException();
        }
        /* Interval within which the minimum fill may vary. */
        if (pMinG <= 0 || pMinG > 0.1) {
            pm.errorMessage(msg.message("trentoP.error.minG"));
            throw new IllegalArgumentException();
        }
        /* Negative minimum discharges in the pipes are not allowed. */
        if (pMinDischarge <= 0) {
            pm.errorMessage(msg.message("trentoP.error.minDischarge"));
            throw new IllegalArgumentException();
        }
        /* The celerity factor must be between 1 and 1.6. */
        if (pCelerityFactor < 1 || pCelerityFactor > 1.6) {
            pm.errorMessage(msg.message("trentoP.error.celerity"));
            throw new IllegalArgumentException();
        }
        /* EXPONENT cannot be negative. */
        if (pExponent <= 0) {
            pm.errorMessage(msg.message("trentoP.error.exponent"));
            throw new IllegalArgumentException();
        }
        /* The tolerance can be neither zero nor negative. */
        if (pTolerance <= 0) {
            pm.errorMessage(msg.message("trentoP.error.tolerance"));
            throw new IllegalArgumentException();
        }
        if (pGamma <= 0) {
            pm.errorMessage(msg.message("trentoP.error.gamma"));
            throw new IllegalArgumentException();
        }
        if (pEspInflux <= 0) {
            pm.errorMessage(msg.message("trentoP.error.eps1"));
            throw new IllegalArgumentException();
        }
        SimpleFeatureType schema = inPipes.getSchema();
        if (pMode == 0) {
            // checkNull(pA, pN, pTau, inDiameters);
            isAreaAllDry = Utility.verifyProjectType(schema, pm);
            if (pA == null || pA <= 0) {
                pm.errorMessage(msg.message("trentoP.error.a"));
                throw new IllegalArgumentException(msg.message("trentoP.error.a"));
            }
            if (pN == null || pN < 0.05 || pN > 0.95) {
                pm.errorMessage(msg.message("trentoP.error.n"));
                throw new IllegalArgumentException(msg.message("trentoP.error.n"));
            }
            if (pTau == null || pTau <= 0) {
                pm.errorMessage(msg.message("trentoP.error.tau"));
                throw new IllegalArgumentException(msg.message("trentoP.error.tau"));
            }
            if (pG == null || pG <= 0 || pG > 0.99) {
                pm.errorMessage(msg.message("trentoP.error.g"));
                throw new IllegalArgumentException(msg.message("trentoP.error.g"));
            }
            if (pAlign != 0 && pAlign != 1) {
                pm.errorMessage(msg.message("trentoP.error.align"));
                throw new IllegalArgumentException(msg.message("trentoP.error.align"));
            }
            /* The minimum excavation depth cannot be zero or below. */
            if (pMinimumDepth <= 0) {
                pm.errorMessage(msg.message("trentoP.error.scavomin"));
                throw new IllegalArgumentException();
            }
            /* Precision used when searching the discharge in non-head areas. */
            if (pEpsilon <= 0 || pEpsilon > 1) {
                pm.errorMessage(msg.message("trentoP.error.epsilon"));
                throw new IllegalArgumentException();
            }
            /* The minimum filling angle cannot be below 3.14 [rad]. */
            if (pMaxTheta < 3.14) {
                pm.errorMessage(msg.message("trentoP.error.maxtheta"));
                throw new IllegalArgumentException();
            }
            if (pC <= 0) {
                pm.errorMessage(msg.message("trentoP.error.c"));
                throw new IllegalArgumentException();
            }
            if (inDiameters == null) {
                throw new IllegalArgumentException();
            }
            /* The time step used to evaluate the discharges cannot be below
             * 0.015 [min]. */
            if (tDTp < 0.015) {
                pm.errorMessage(msg.message("trentoP.error.dtp"));
                throw new IllegalArgumentException();
            }
            /* The minimum rain duration considered when maximizing the
             * discharges cannot exceed 5 [min]. */
            if (tpMin > 5) {
                pm.errorMessage(msg.message("trentoP.error.tpmin"));
                throw new IllegalArgumentException();
            }
            /* The maximum rain duration used in the peak-discharge search must
             * not be below 30 [min] (the original comment said 5, but the code
             * checks 30). */
            if (tpMax < 30) {
                pm.errorMessage(msg.message("trentoP.error.tpmax"));
                throw new IllegalArgumentException();
            }
        } else {
            // checkNull(inRain);
            isAreaAllDry = Utility.verifyCalibrationType(schema, pm);
            /* If inRain is null and the user set the a and n parameters then
             * create the rain data. */
            if (pA != null && pN != null) {
                // Set it to true in order to search the time at max discharge.
                if (tpMaxCalibration != null) {
                    foundTp = true;
                } else {
                    tpMaxCalibration = tMax;
                }
                if (dt == null) {
                    pm.errorMessage(msg.message("trentoP.error.dtp"));
                    throw new IllegalArgumentException();
                }
                if (tMax < tpMaxCalibration) {
                    tpMaxCalibration = tMax;
                }
                double tMaxApproximate = ModelsEngine.approximate2Multiple(tMax, dt);
                // Initialize the output.
                int iMax = (int) (tMaxApproximate / dt);
                int iRainMax = (int) (Math.floor((double) tpMaxCalibration / (double) dt));
                DateTime startTime = new DateTime(System.currentTimeMillis());
                inRain = new LinkedHashMap<DateTime, double[]>();
                double tp = ((double) dt) / 2;
                DateTime newDate = startTime;
                // Build a synthetic rain series: intensity from the a/n power law
                // while i < iRainMax, zero afterwards.
                for (int i = 0; i <= iMax; i++) {
                    newDate = newDate.plusMinutes(dt);
                    double hourTime = tp / Constants.HOUR2MIN;
                    double value;
                    if (i < iRainMax) {
                        value = pA * pow(hourTime, pN - 1) / Constants.HOUR2MIN;
                    } else {
                        value = 0.0;
                    }
                    inRain.put(newDate, new double[] { value });
                    tp = tp + dt;
                }
            } else {
                // Force the time step to null in order to read it from the file.
                dt = null;
            }
            if (inRain == null) {
                pm.errorMessage(msg.message("trentoP.error.inputRainMatrix") + " rain file");
                throw new IllegalArgumentException(msg.message("trentoP.error.inputRainMatrix") + " rain file");
            }
            // Verify if the field exists.
        }
        return isAreaAllDry;
    }
}
public class AStar { /** * Create a instance of { @ link AStarNode A * node } .
* @ param node is the node of the graph to put in the A * node .
* @ param cost is the cost to reach the node .
* @ param estimatedCost is the estimated cost to reach the target .
* @ param arrival is the segment , which permits to arrive at the node .
* @ return the A * node . */
@ SuppressWarnings ( "unchecked" ) private AStarNode < ST , PT > node ( PT node , double cost , double estimatedCost , ST arrival ) { } } | final AStarNode < ST , PT > aNode ; if ( node instanceof AStarNode < ? , ? > ) { aNode = ( AStarNode < ST , PT > ) node ; aNode . setArrivalConnection ( arrival ) ; aNode . setCost ( cost ) ; aNode . setEstimatedCost ( estimatedCost ) ; } else { aNode = newAStarNode ( node , cost , estimatedCost , arrival ) ; } return aNode ; |
public class ExtendedPalette { /** * factory method for the addAll component
* @ return addAll component */
protected Component newAddAllComponent ( ) { } } | return new PaletteButton ( "addAllButton" ) { private static final long serialVersionUID = 1L ; protected void onComponentTag ( ComponentTag tag ) { super . onComponentTag ( tag ) ; tag . getAttributes ( ) . put ( "onclick" , getAddAllOnClickJS ( ) ) ; } } ; |
public class ReflectionHelper { /** * Load class with given name from the correct class loader .
* @ param name name of class to load
* @ param < T > type of object to create
* @ return class instance
* @ throws ClassNotFoundException see { @ link ClassLoader # loadClass ( String ) } */
< T > Class < T > loadClass ( String name ) throws ClassNotFoundException { } } | ClassLoader cl = Thread . currentThread ( ) . getContextClassLoader ( ) ; if ( null == cl ) { cl = ToHelper . class . getClassLoader ( ) ; } return ( Class < T > ) cl . loadClass ( name ) ; |
public class JDBCRepository {

    /**
     * Returns a connection for use against this repository. Any connection
     * returned by this method must be closed by calling yieldConnection on this
     * repository.
     *
     * @return a connection in auto-commit mode, or the connection owned by the
     *         current transaction when one is active
     * @throws FetchException if the repository is closed or a connection cannot
     *         be obtained
     */
    public Connection getConnection() throws FetchException {
        try {
            if (mOpenConnections == null) {
                throw new FetchException("Repository is closed");
            }
            JDBCTransaction txn = localTransactionScope().getTxn();
            if (txn != null) {
                // Return the connection used by the current transaction.
                return txn.getConnection();
            }
            // Get connection outside lock section since it may block.
            Connection con = mDataSource.getConnection();
            con.setAutoCommit(true);
            mOpenConnectionsLock.lock();
            try {
                // Re-check under the lock: the repository may have been closed
                // while we were acquiring the connection.
                if (mOpenConnections == null) {
                    con.close();
                    throw new FetchException("Repository is closed");
                }
                mOpenConnections.put(con, null);
            } finally {
                mOpenConnectionsLock.unlock();
            }
            return con;
        } catch (Exception e) {
            throw toFetchException(e);
        }
    }
}
public class OptionsApi { /** * Receive exist options .
* The GET operation will fetch CloudCluster / Options and merges it with person and sgent groups annexes .
* @ param personDbid DBID of a person . Options will be merged with the Person & # 39 ; s annex and annexes of it & # 39 ; s agent groups . Mutual with agent _ group _ dbid . ( optional )
* @ param agentGroupDbid DBID of a person . Options will be merged with the Agent Groups & # 39 ; s annex . Mutual with person _ dbid . ( optional )
* @ return OptionsGetResponseSuccess
* @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */
public OptionsGetResponseSuccess optionsGet ( String personDbid , String agentGroupDbid ) throws ApiException { } } | ApiResponse < OptionsGetResponseSuccess > resp = optionsGetWithHttpInfo ( personDbid , agentGroupDbid ) ; return resp . getData ( ) ; |
public class AnnotationInfo {

    /**
     * Adds an attribute to this annotation.
     * <p>If an attribute with the same name already exists, it is replaced.</p>
     *
     * @param name the attribute name; required
     * @param value the attribute value, given in
     *        <a href="http://s2container.seasar.org/2.4/ja/ognl.html" target="_blank">OGNL notation</a>
     * @throws IllegalArgumentException if name is empty
     */
    public void addAttribute(final String name, final String value) {
        ArgUtils.notEmpty(name, "name");
        // Replace-on-duplicate: drop any existing attribute with this name first.
        removeAttribute(name);
        this.attributes.add(AttributeInfo.create(name, value));
    }
}
public class BouncyCastleUtil { /** * Returns the certificate type of the given certificate .
* Please see { @ link # getCertificateType ( TBSCertificateStructure ,
* TrustedCertificates ) getCertificateType } for details for
* determining the certificate type .
* @ param cert the certificate to get the type of .
* @ param trustedCerts the trusted certificates to double check the
* { @ link GSIConstants # EEC GSIConstants . EEC }
* certificate against .
* @ return the certificate type as determined by
* { @ link # getCertificateType ( TBSCertificateStructure ,
* TrustedCertificates ) getCertificateType } .
* @ exception CertificateException if something goes wrong . */
public static GSIConstants . CertificateType getCertificateType ( X509Certificate cert , CertStore trustedCerts ) throws CertificateException { } } | try { TBSCertificateStructure crt = getTBSCertificateStructure ( cert ) ; GSIConstants . CertificateType type = getCertificateType ( crt ) ; // check subject of the cert in trusted cert list
// to make sure the cert is not a ca cert
if ( type == GSIConstants . CertificateType . EEC ) { X509CertSelector selector = new X509CertSelector ( ) ; selector . setSubject ( cert . getSubjectX500Principal ( ) ) ; Collection c = trustedCerts . getCertificates ( selector ) ; if ( c != null && c . size ( ) > 0 ) { type = GSIConstants . CertificateType . CA ; } } return type ; } catch ( Exception e ) { // but this should not happen
throw new CertificateException ( "" , e ) ; } |
public class ICalComponent { /** * Utility method for validating the { @ link Status } property of a component .
* @ param warnings the list to add the warnings to
* @ param allowed the valid statuses */
protected void checkStatus ( List < ValidationWarning > warnings , Status ... allowed ) { } } | Status actual = getProperty ( Status . class ) ; if ( actual == null ) { return ; } List < String > allowedValues = new ArrayList < String > ( allowed . length ) ; for ( Status status : allowed ) { String value = status . getValue ( ) . toLowerCase ( ) ; allowedValues . add ( value ) ; } String actualValue = actual . getValue ( ) . toLowerCase ( ) ; if ( ! allowedValues . contains ( actualValue ) ) { warnings . add ( new ValidationWarning ( 13 , actual . getValue ( ) , allowedValues ) ) ; } |
public class ULocale { /** * < strong > [ icu ] < / strong > Returns the locale ID localized for display in the provided locale .
* This is a cover for the ICU4C API .
* @ param localeID the locale whose name is to be displayed .
* @ param displayLocale the locale in which to display the locale name .
* @ return the localized locale name . */
public static String getDisplayName ( String localeID , ULocale displayLocale ) { } } | return getDisplayNameInternal ( new ULocale ( localeID ) , displayLocale ) ; |
public class OracleNoSQLSchemaManager { /** * Creates the index on table .
* @ param tableInfo
* the table info */
private void createIndexOnTable ( TableInfo tableInfo ) { } } | List < IndexInfo > indexColumns = tableInfo . getColumnsToBeIndexed ( ) ; for ( IndexInfo indexInfo : indexColumns ) { if ( indexInfo . getIndexType ( ) != null && indexInfo . getIndexType ( ) . toLowerCase ( ) . equals ( Constants . COMPOSITE ) ) { String [ ] columnNames = indexInfo . getColumnName ( ) . split ( Constants . COMMA ) ; createIndex ( tableInfo . getTableName ( ) , indexInfo . getIndexName ( ) , columnNames ) ; } else { createIndex ( tableInfo . getTableName ( ) , indexInfo . getIndexName ( ) , indexInfo . getColumnName ( ) ) ; } } |
public class UserContextResource { /** * Returns a new resource which represents the ActiveConnection Directory
* contained within the UserContext exposed by this UserContextResource .
* @ return
* A new resource which represents the ActiveConnection Directory
* contained within the UserContext exposed by this UserContextResource .
* @ throws GuacamoleException
* If an error occurs while retrieving the ActiveConnection Directory . */
@ Path ( "activeConnections" ) public DirectoryResource < ActiveConnection , APIActiveConnection > getActiveConnectionDirectoryResource ( ) throws GuacamoleException { } } | return activeConnectionDirectoryResourceFactory . create ( userContext , userContext . getActiveConnectionDirectory ( ) ) ; |
public class LoggingHandler { /** * Generates the default log message of the specified event whose argument is a { @ link ByteBufHolder } . */
private static String formatByteBufHolder ( ChannelHandlerContext ctx , String eventName , ByteBufHolder msg ) { } } | String chStr = ctx . channel ( ) . toString ( ) ; String msgStr = msg . toString ( ) ; ByteBuf content = msg . content ( ) ; int length = content . readableBytes ( ) ; if ( length == 0 ) { StringBuilder buf = new StringBuilder ( chStr . length ( ) + 1 + eventName . length ( ) + 2 + msgStr . length ( ) + 4 ) ; buf . append ( chStr ) . append ( ' ' ) . append ( eventName ) . append ( ", " ) . append ( msgStr ) . append ( ", 0B" ) ; return buf . toString ( ) ; } else { int rows = length / 16 + ( length % 15 == 0 ? 0 : 1 ) + 4 ; StringBuilder buf = new StringBuilder ( chStr . length ( ) + 1 + eventName . length ( ) + 2 + msgStr . length ( ) + 2 + 10 + 1 + 2 + rows * 80 ) ; buf . append ( chStr ) . append ( ' ' ) . append ( eventName ) . append ( ": " ) . append ( msgStr ) . append ( ", " ) . append ( length ) . append ( 'B' ) . append ( NEWLINE ) ; appendPrettyHexDump ( buf , content ) ; return buf . toString ( ) ; } |
public class ShrinkWrapMvnTemplateProviderElement { @ Override protected List < File > resolve ( String fqtn ) throws Exception { } } | if ( supportLogger . isDebugEnabled ( ) ) supportLogger . debug ( Logs . LOG_01 , ansiString ( GREEN , fqtn ) ) ; StringTokenizer tokenizer = new StringTokenizer ( fqtn , ":" , false ) ; String groupId = tokenizer . nextToken ( ) ; String templateId = tokenizer . nextToken ( ) ; String version = tokenizer . nextToken ( ) ; return mvnDependencyResolver . resolve ( groupId , templateId , version ) ; |
public class UserAccountHelper { /** * Similar to updateAccount , but narrowed to the password and re - tooled to work as the guest
* user ( which is what you are , when you have a valid security token ) . */
public void createPassword ( PersonForm form , String token ) { } } | final String username = form . getUsername ( ) ; // Re - validate the token to prevent URL hacking
if ( ! validateLoginToken ( username , token ) ) { throw new RuntimeException ( "Attempt to set a password for user '" + username + "' without a valid security token" ) ; } final String password = form . getPassword ( ) ; if ( StringUtils . isNotBlank ( password ) ) { if ( ! password . equals ( form . getConfirmPassword ( ) ) ) { throw new RuntimeException ( "Passwords don't match" ) ; } ILocalAccountPerson account = accountDao . getPerson ( username ) ; account . setPassword ( passwordService . encryptPassword ( password ) ) ; account . setLastPasswordChange ( new Date ( ) ) ; account . removeAttribute ( "loginToken" ) ; accountDao . updateAccount ( account ) ; if ( log . isInfoEnabled ( ) ) { log . info ( "Password created for account: " + account ) ; } } else { throw new RuntimeException ( "Attempt to set a password for user '" + form . getUsername ( ) + "' but the password was blank" ) ; } |
public class MPXJFormats { /** * Generate date patterns based on the project configuration .
* @ param properties project properties
* @ return date patterns */
private String [ ] getDatePatterns ( ProjectProperties properties ) { } } | String pattern = "" ; char datesep = properties . getDateSeparator ( ) ; DateOrder dateOrder = properties . getDateOrder ( ) ; switch ( dateOrder ) { case DMY : { pattern = "dd" + datesep + "MM" + datesep + "yy" ; break ; } case MDY : { pattern = "MM" + datesep + "dd" + datesep + "yy" ; break ; } case YMD : { pattern = "yy" + datesep + "MM" + datesep + "dd" ; break ; } } return new String [ ] { pattern } ; |
public class JMElasticsearchBulk { /** * Send with bulk processor .
* @ param source the source
* @ param index the index
* @ param type the type */
public void sendWithBulkProcessor ( Map < String , ? > source , String index , String type ) { } } | sendWithBulkProcessor ( source , index , type , null ) ; |
public class LocLogger { /** * Log a localized message at the TRACE level .
* @ param key
* the key used for localization
* @ param args
* optional arguments */
public void trace ( Enum < ? > key , Object ... args ) { } } | if ( ! logger . isTraceEnabled ( ) ) { return ; } String translatedMsg = imc . getMessage ( key , args ) ; MessageParameterObj mpo = new MessageParameterObj ( key , args ) ; if ( instanceofLAL ) { ( ( LocationAwareLogger ) logger ) . log ( LOCALIZED , FQCN , LocationAwareLogger . TRACE_INT , translatedMsg , args , null ) ; } else { logger . trace ( LOCALIZED , translatedMsg , mpo ) ; } |
public class EventConsumer { void reSubscribeByName ( EventChannelStruct event_channel_struct , String name ) { } } | Enumeration callback_structs = event_callback_map . elements ( ) ; while ( callback_structs . hasMoreElements ( ) ) { EventCallBackStruct callback_struct = ( EventCallBackStruct ) callback_structs . nextElement ( ) ; if ( callback_struct . channel_name . equals ( name ) ) { reSubscribe ( event_channel_struct , callback_struct ) ; } } |
public class BorderApplier { /** * { @ inheritDoc } */
public void apply ( HSSFCell cell , HSSFCellStyle cellStyle , Map < String , String > style ) { } } | for ( String pos : new String [ ] { TOP , RIGHT , BOTTOM , LEFT } ) { String posName = StringUtils . capitalize ( pos . toLowerCase ( ) ) ; // color
String colorAttr = BORDER + "-" + pos + "-" + COLOR ; HSSFColor poiColor = CssUtils . parseColor ( cell . getSheet ( ) . getWorkbook ( ) , style . get ( colorAttr ) ) ; if ( poiColor != null ) { try { MethodUtils . invokeMethod ( cellStyle , "set" + posName + "BorderColor" , poiColor . getIndex ( ) ) ; } catch ( Exception e ) { log . error ( "Set Border Color Error Caused." , e ) ; } } // width
int width = CssUtils . getInt ( style . get ( BORDER + "-" + pos + "-" + WIDTH ) ) ; String styleAttr = BORDER + "-" + pos + "-" + STYLE ; String styleValue = style . get ( styleAttr ) ; short shortValue = - 1 ; // empty or solid
if ( StringUtils . isBlank ( styleValue ) || "solid" . equals ( styleValue ) ) { if ( width > 2 ) { shortValue = CellStyle . BORDER_THICK ; } else if ( width > 1 ) { shortValue = CellStyle . BORDER_MEDIUM ; } else { shortValue = CellStyle . BORDER_THIN ; } } else if ( ArrayUtils . contains ( new String [ ] { NONE , HIDDEN } , styleValue ) ) { shortValue = CellStyle . BORDER_NONE ; } else if ( DOUBLE . equals ( styleValue ) ) { shortValue = CellStyle . BORDER_DOUBLE ; } else if ( DOTTED . equals ( styleValue ) ) { shortValue = CellStyle . BORDER_DOTTED ; } else if ( DASHED . equals ( styleValue ) ) { if ( width > 1 ) { shortValue = CellStyle . BORDER_MEDIUM_DASHED ; } else { shortValue = CellStyle . BORDER_DASHED ; } } // border style
if ( shortValue != - 1 ) { try { MethodUtils . invokeMethod ( cellStyle , "setBorder" + posName , shortValue ) ; } catch ( Exception e ) { log . error ( "Set Border Style Error Caused." , e ) ; } } } |
public class Train { /** * Do train .
* @ param fout the fout */
public void doTrain ( PrintWriter fout ) { } } | long start_train , end_train , elapsed_train ; long start_iter , end_iter , elapsed_iter ; // initialization
init ( ) ; double f = 0.0 ; // double old _ f ;
double xtol = 1.0e-16 ; int numIter = 0 ; // for L - BFGS
iprint [ 0 ] = model . option . debugLevel - 2 ; iprint [ 1 ] = model . option . debugLevel - 1 ; iflag [ 0 ] = 0 ; // counter
int i ; // get initial values for lambda
for ( i = 0 ; i < numFeatures ; i ++ ) { lambda [ i ] = model . option . initLambdaVal ; } System . out . println ( "Start to train ..." ) ; if ( model . option . isLogging ) { model . option . writeOptions ( fout ) ; fout . println ( "Start to train ..." ) ; } // starting time of the training process
start_train = System . currentTimeMillis ( ) ; double maxAccuracy = 0.0 ; int maxAccuracyIter = - 1 ; // the training loop
do { // starting time of iteration
start_iter = System . currentTimeMillis ( ) ; // call this to compute two things :
// 1 . log - likelihood value
// 2 . the gradient vector of log - likelihood function
f = computeLogLiGradient ( lambda , gradLogLi , numIter + 1 , fout ) ; // negate f and its gradient because L - BFGS minimizes the objective function
// while we would like to maximize it
f *= - 1 ; for ( i = 0 ; i < numFeatures ; i ++ ) { gradLogLi [ i ] *= - 1 ; } // calling L - BFGS
try { new LBFGS ( ) . lbfgs ( numFeatures , model . option . mForHessian , lambda , f , gradLogLi , false , diag , iprint , model . option . epsForConvergence , xtol , iflag ) ; } catch ( LBFGS . ExceptionWithIflag e ) { System . out . println ( "L-BFGS failed!" ) ; if ( model . option . isLogging ) { fout . println ( "L-BFGS failed!" ) ; } break ; } numIter ++ ; // get the end time of the current iteration
end_iter = System . currentTimeMillis ( ) ; elapsed_iter = end_iter - start_iter ; System . out . println ( "\tIteration elapsed: " + Double . toString ( ( double ) elapsed_iter / 1000 ) + " seconds" ) ; if ( model . option . isLogging ) { fout . println ( "\tIteration elapsed: " + Double . toString ( ( double ) elapsed_iter / 1000 ) + " seconds" ) ; } // evaluate during training
if ( model . option . evaluateDuringTraining ) { // inference on testing data
model . doInference ( model . data . tstData ) ; // evaluation
double accuracy = model . evaluation . evaluate ( fout ) ; if ( accuracy > maxAccuracy ) { maxAccuracy = accuracy ; maxAccuracyIter = numIter ; // save the best model towards testing evaluation
if ( model . option . saveBestModel ) { for ( i = 0 ; i < numFeatures ; i ++ ) { tempLambda [ i ] = lambda [ i ] ; } } } System . out . println ( "\tCurrent max accuracy: " + Double . toString ( maxAccuracy ) + " (at iteration " + Integer . toString ( maxAccuracyIter ) + ")" ) ; if ( model . option . isLogging ) { fout . println ( "\tCurrent max accuracy: " + Double . toString ( maxAccuracy ) + " (at iteration " + Integer . toString ( maxAccuracyIter ) + ")" ) ; } // get the end time of the current iteration
end_iter = System . currentTimeMillis ( ) ; elapsed_iter = end_iter - start_iter ; System . out . println ( "\tIteration elapsed (including testing & evaluation): " + Double . toString ( ( double ) elapsed_iter / 1000 ) + " seconds" ) ; if ( model . option . isLogging ) { fout . println ( "\tIteration elapsed (including testing & evaluation): " + Double . toString ( ( double ) elapsed_iter / 1000 ) + " seconds" ) ; fout . flush ( ) ; } } } while ( iflag [ 0 ] != 0 && numIter < model . option . numIterations ) ; // get the end time of the training process
end_train = System . currentTimeMillis ( ) ; elapsed_train = end_train - start_train ; System . out . println ( "\tThe training process elapsed: " + Double . toString ( ( double ) elapsed_train / 1000 ) + " seconds" ) ; if ( model . option . isLogging ) { fout . println ( "\tThe training process elapsed: " + Double . toString ( ( double ) elapsed_train / 1000 ) + " seconds" ) ; } if ( model . option . evaluateDuringTraining && model . option . saveBestModel ) { for ( i = 0 ; i < numFeatures ; i ++ ) { lambda [ i ] = tempLambda [ i ] ; } } |
public class HttpUrlConnectionRpcSession { /** * Reads a response . */
protected < T , E > T handleResponse ( final HttpURLConnection connection , final DataTypeDescriptor < T > datad , final DataTypeDescriptor < E > errord ) throws IOException { } } | connection . connect ( ) ; int status = connection . getResponseCode ( ) ; if ( status == HttpURLConnection . HTTP_OK ) { // It ' s a successful response , try to read the result .
return readResult ( connection , datad ) ; } else if ( status == APPLICATION_EXC_STATUS ) { // It ' s an expected application exception .
throw ( RuntimeException ) readApplicationException ( connection , errord ) ; } else { // Something bad happened , try to read the error message .
throw readError ( connection ) ; } |
public class CassandraSchemaMgr { /** * Create a new keyspace with the given name . The keyspace is created with parameters
* defined for our DBService instance , if any . This method should be used with a
* no - keyspace DB connection .
* @ param dbConn Database connection to use .
* @ param keyspace Name of new keyspace . */
public void createKeyspace ( DBConn dbConn , String keyspace ) { } } | m_logger . info ( "Creating Keyspace '{}'" , keyspace ) ; try { KsDef ksDef = setKeySpaceOptions ( keyspace ) ; dbConn . getClientSession ( ) . system_add_keyspace ( ksDef ) ; waitForSchemaPropagation ( dbConn ) ; Thread . sleep ( 1000 ) ; // wait for gossip to other Cassandra nodes
} catch ( Exception ex ) { String errMsg = "Failed to create Keyspace '" + keyspace + "'" ; m_logger . error ( errMsg , ex ) ; throw new RuntimeException ( errMsg , ex ) ; } |
public class Queues { /** * Retrieves queues in alphabetical order .
* @ throws HTTPException If the IronMQ service returns a status other than 200 OK .
* @ throws java . io . IOException If there is an error accessing the IronMQ server . */
public static ArrayList < QueueModel > getQueues ( Client client ) throws IOException { } } | return getQueues ( client , null , null , null ) ; |
public class ValidationProcessor { /** * / * package private */
void markAsProcessed ( final CtClass ctClass ) { } } | final ClassFile classFile = ctClass . getClassFile ( ) ; for ( final Object attributeObject : classFile . getAttributes ( ) ) { if ( attributeObject instanceof AnnotationsAttribute ) { final AnnotationsAttribute annotationAttribute = ( AnnotationsAttribute ) attributeObject ; final javassist . bytecode . annotation . Annotation annotation = annotationAttribute . getAnnotation ( PROCESSED_ANNOTATION_CLASS ) ; if ( annotation != null ) { return ; } } } final javassist . bytecode . annotation . Annotation annotation = new javassist . bytecode . annotation . Annotation ( PROCESSED_ANNOTATION_CLASS , classFile . getConstPool ( ) ) ; final AnnotationsAttribute annotationAttribute = new AnnotationsAttribute ( classFile . getConstPool ( ) , AnnotationsAttribute . visibleTag ) ; annotationAttribute . addAnnotation ( annotation ) ; classFile . addAttribute ( annotationAttribute ) ; |
public class PrettyPrint { /** * Prettyprint into a string , no type header . */
public static String printToStr ( Object o ) { } } | StringWriter sw = new StringWriter ( ) ; try { print ( o , sw , false ) ; } catch ( IOException e ) { sw . write ( "<<error>>" ) ; } return sw . toString ( ) . trim ( ) ; |
public class SemEvalReporter { /** * { @ inheritDoc } */
public String [ ] contextLabels ( String primaryKey ) { } } | // Get the mapping from primarykeys to context descriptors .
List < Assignment > primaryAssignments = assignmentMap . get ( primaryKey ) ; // Return an empty array if one does not exist .
if ( primaryAssignments == null ) return new String [ 0 ] ; // Copy the label assignments for each context id recorded . Here we
// assume that the largest _ context _ id = = ( # id ' s _ recorded - 1 ) .
String [ ] labels = new String [ primaryAssignments . size ( ) ] ; for ( Assignment assignment : primaryAssignments ) labels [ assignment . id ] = assignment . key ; return labels ; |
public class ServiceQuery { /** * Checks if there is still an ad group bid landscape page left to query .
* < p > This method is meant to be used with { @ link ServiceQuery # nextPage ( AdGroupBidLandscapePage ) } .
* The { @ code page } is necessary when using DataService , as its paging mechanism is different from
* other services . For details , see
* https : / / developers . google . com / adwords / api / docs / guides / bid - landscapes # paging _ through _ results .
* @ param page the ad group bid landscape page whose total number of landscape points will be used
* to determine if there is still a page left
* @ return true if there is still a page left */
public boolean hasNext ( @ Nullable AdGroupBidLandscapePage page ) { } } | if ( page == null ) { return true ; } return getTotalLandscapePointsInPage ( page ) >= pageSize ; |
public class FinishingOperationImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public void eSet ( int featureID , Object newValue ) { } } | switch ( featureID ) { case AfplibPackage . FINISHING_OPERATION__FOP_TYPE : setFOpType ( ( Integer ) newValue ) ; return ; case AfplibPackage . FINISHING_OPERATION__REF_EDGE : setRefEdge ( ( Integer ) newValue ) ; return ; case AfplibPackage . FINISHING_OPERATION__FOP_CNT : setFOpCnt ( ( Integer ) newValue ) ; return ; case AfplibPackage . FINISHING_OPERATION__AX_OFFST : setAxOffst ( ( Integer ) newValue ) ; return ; case AfplibPackage . FINISHING_OPERATION__OP_POS : setOpPos ( ( byte [ ] ) newValue ) ; return ; } super . eSet ( featureID , newValue ) ; |
public class ExtractionFns { /** * Converts extractionFn to a QueryGranularity , if possible . This is the inverse of
* { @ link # fromQueryGranularity ( Granularity ) } .
* @ param extractionFn function
* @ return query granularity , or null if extractionFn cannot be translated */
public static Granularity toQueryGranularity ( final ExtractionFn extractionFn ) { } } | if ( extractionFn instanceof TimeFormatExtractionFn ) { final TimeFormatExtractionFn fn = ( TimeFormatExtractionFn ) extractionFn ; if ( fn . getFormat ( ) == null && fn . getTimeZone ( ) == null && fn . getLocale ( ) == null && fn . isAsMillis ( ) ) { return fn . getGranularity ( ) ; } } return null ; |
public class MyEntitiesValidationReport { /** * Creates a new report , with an entity added to it .
* @ param entityTypeId name of the entity
* @ param importable true if the entity is importable
* @ return this report */
public MyEntitiesValidationReport addEntity ( String entityTypeId , boolean importable ) { } } | sheetsImportable . put ( entityTypeId , importable ) ; valid = valid && importable ; if ( importable ) { fieldsImportable . put ( entityTypeId , new ArrayList < > ( ) ) ; fieldsUnknown . put ( entityTypeId , new ArrayList < > ( ) ) ; fieldsRequired . put ( entityTypeId , new ArrayList < > ( ) ) ; fieldsAvailable . put ( entityTypeId , new ArrayList < > ( ) ) ; importOrder . add ( entityTypeId ) ; } return this ; |
public class DbfRow { /** * Retrieves the value of the designated field as String
* using given charset .
* @ param fieldName the name of the field
* @ param charset the charset to be used to decode field value
* @ return the field value , or null ( if the dbf value is NULL )
* @ throws DbfException if there ' s no field with name fieldName */
public String getString ( String fieldName , Charset charset ) throws DbfException { } } | Object value = get ( fieldName ) ; return value == null ? null : new String ( trimLeftSpaces ( ( byte [ ] ) value ) , charset ) ; |
public class SiteInfoImpl { /** * initialize the NameSpaces from the given namespaceList
* @ param general
* @ param namespaceList */
protected void initNameSpaces ( General general , List < Ns > namespaceList ) { } } | namespaces = new LinkedHashMap < String , Ns > ( ) ; namespacesById = new LinkedHashMap < Integer , Ns > ( ) ; namespacesByCanonicalName = new LinkedHashMap < String , Ns > ( ) ; for ( Ns namespace : namespaceList ) { String namespacename = namespace . getValue ( ) ; namespaces . put ( namespacename , namespace ) ; namespacesById . put ( namespace . getId ( ) , namespace ) ; String canonical = namespace . getCanonical ( ) ; // this is a BUG in 2015-07-02 - some canonical names are not correct
// FIXME - when Bug has been fixed in SMW
String bugs [ ] = { "Attribut" , "Property" , "Konzept" , "Concept" , "Kategorie" , "Category" } ; for ( int i = 0 ; i < bugs . length ; i += 2 ) { if ( bugs [ i ] . equals ( canonical ) && bugs [ i ] . equals ( namespacename ) ) { canonical = bugs [ i + 1 ] ; namespace . setCanonical ( bugs [ i + 1 ] ) ; } } namespacesByCanonicalName . put ( canonical , namespace ) ; } |
public class Reflect { /** * Call the method .
* @ param < R > the type of the returned value .
* @ param instance the instance to call on .
* @ param type the type .
* @ param returnType the type of the returned value .
* @ param methodName the name of the method .
* @ param types the types of the parameters .
* @ param args the values of the arguments .
* @ return the value . */
public static < R > R callFunc ( Object instance , Class < ? > type , Class < R > returnType , String methodName , Class < ? > [ ] types , Object ... args ) { } } | try { final Method method = type . getDeclaredMethod ( methodName , types ) ; method . setAccessible ( true ) ; return returnType . cast ( method . invoke ( instance , args ) ) ; } catch ( Exception exception ) { throw new Error ( exception ) ; } |
public class ReflectionsTypeScanner { /** * Creates a new collection with abstract types which can not be instantiated . */
private Collection < Class < ? extends Saga > > removeAbstractTypes ( final Collection < Class < ? extends Saga > > foundTypes ) { } } | Collection < Class < ? extends Saga > > sagaTypes = new ArrayList < > ( ) ; for ( Class < ? extends Saga > entryType : foundTypes ) { if ( ! Modifier . isAbstract ( entryType . getModifiers ( ) ) ) { sagaTypes . add ( entryType ) ; } } return sagaTypes ; |
public class CPSpecificationOptionUtil { /** * Returns the first cp specification option in the ordered set where uuid = & # 63 ; .
* @ param uuid the uuid
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching cp specification option
* @ throws NoSuchCPSpecificationOptionException if a matching cp specification option could not be found */
public static CPSpecificationOption findByUuid_First ( String uuid , OrderByComparator < CPSpecificationOption > orderByComparator ) throws com . liferay . commerce . product . exception . NoSuchCPSpecificationOptionException { } } | return getPersistence ( ) . findByUuid_First ( uuid , orderByComparator ) ; |
public class ItemsUtil { /** * Checks the sequential validity of the given array of values .
* They must be unique , monotonically increasing and not null .
* @ param < T > the data type
* @ param values given array of values
* @ param comparator the comparator for data type T */
static final < T > void validateValues ( final T [ ] values , final Comparator < ? super T > comparator ) { } } | final int lenM1 = values . length - 1 ; for ( int j = 0 ; j < lenM1 ; j ++ ) { if ( ( values [ j ] != null ) && ( values [ j + 1 ] != null ) && ( comparator . compare ( values [ j ] , values [ j + 1 ] ) < 0 ) ) { continue ; } throw new SketchesArgumentException ( "Values must be unique, monotonically increasing and not null." ) ; } |
public class VirtualHubsInner { /** * Deletes a VirtualHub .
* @ param resourceGroupName The resource group name of the VirtualHub .
* @ param virtualHubName The name of the VirtualHub .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws ErrorException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */
public void delete ( String resourceGroupName , String virtualHubName ) { } } | deleteWithServiceResponseAsync ( resourceGroupName , virtualHubName ) . toBlocking ( ) . last ( ) . body ( ) ; |
public class IntIntMap { /** * Ensure that the hash can comfortably hold the specified number of elements . Calling this
* method is not necessary , but can improve performance if done prior to adding many elements . */
public void ensureCapacity ( int minCapacity ) { } } | int size = _buckets . length ; while ( minCapacity > ( int ) ( size * _loadFactor ) ) { size *= 2 ; } if ( size != _buckets . length ) { resizeBuckets ( size ) ; } |
public class ServerDef { /** * < pre >
* The cluster of which this server is a member .
* < / pre >
* < code > optional . tensorflow . ClusterDef cluster = 1 ; < / code > */
public org . tensorflow . distruntime . ClusterDef getCluster ( ) { } } | return cluster_ == null ? org . tensorflow . distruntime . ClusterDef . getDefaultInstance ( ) : cluster_ ; |
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getIfcBoxedHalfSpace ( ) { } } | if ( ifcBoxedHalfSpaceEClass == null ) { ifcBoxedHalfSpaceEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 53 ) ; } return ifcBoxedHalfSpaceEClass ; |
public class AbstractCounter { /** * { @ inheritDoc } */
@ Override public DataPoint [ ] getSeries ( long timestampStartMs , long timestampEndMs , int steps , DataPoint . Type type ) { } } | DataPoint [ ] origin = getAllInRange ( timestampStartMs , timestampEndMs ) ; if ( steps < 1 ) { steps = 1 ; } if ( steps == 1 ) { return origin ; } int n = origin . length / steps ; if ( n * steps < origin . length ) { n ++ ; } DataPoint [ ] result = new DataPoint [ n ] ; int orgIndex = 0 ; int blockSize = steps * RESOLUTION_MS ; for ( DataPoint org : origin ) { int resultIndex = orgIndex / steps ; DataPoint block = result [ resultIndex ] ; if ( block == null ) { long t = org . timestamp ( ) ; /* as of 0.5.0 , the two following lines are commented out ! */
// long delta = t % blockSize ;
// t - = delta ;
block = new DataPoint ( DataPoint . Type . NONE , t , 0 , blockSize ) ; result [ resultIndex ] = block ; } if ( block . type ( ) == DataPoint . Type . NONE && org . type ( ) != DataPoint . Type . NONE ) { block . type ( type ) ; } block . add ( org ) ; orgIndex ++ ; } return result ; |
public class TrainingsImpl { /** * Add the provided images to the set of training images .
* This API accepts body content as multipart / form - data and application / octet - stream . When using multipart
* multiple image files can be sent at once , with a maximum of 64 files .
* @ param projectId The project id
* @ param imageData the InputStream value
* @ param createImagesFromDataOptionalParameter the object representing the optional parameters to be set before calling this API
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the ImageCreateSummary object */
public Observable < ServiceResponse < ImageCreateSummary > > createImagesFromDataWithServiceResponseAsync ( UUID projectId , byte [ ] imageData , CreateImagesFromDataOptionalParameter createImagesFromDataOptionalParameter ) { } } | if ( projectId == null ) { throw new IllegalArgumentException ( "Parameter projectId is required and cannot be null." ) ; } if ( imageData == null ) { throw new IllegalArgumentException ( "Parameter imageData is required and cannot be null." ) ; } if ( this . client . apiKey ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiKey() is required and cannot be null." ) ; } final List < String > tagIds = createImagesFromDataOptionalParameter != null ? createImagesFromDataOptionalParameter . tagIds ( ) : null ; return createImagesFromDataWithServiceResponseAsync ( projectId , imageData , tagIds ) ; |
public class MpxjQuery { /** * This method performs a set of queries to retrieve information
* from the an MPP or an MPX file .
* @ param filename name of the MPX file
* @ throws Exception on file read error */
private static void query ( String filename ) throws Exception { } } | ProjectFile mpx = new UniversalProjectReader ( ) . read ( filename ) ; listProjectProperties ( mpx ) ; listResources ( mpx ) ; listTasks ( mpx ) ; listAssignments ( mpx ) ; listAssignmentsByTask ( mpx ) ; listAssignmentsByResource ( mpx ) ; listHierarchy ( mpx ) ; listTaskNotes ( mpx ) ; listResourceNotes ( mpx ) ; listRelationships ( mpx ) ; listSlack ( mpx ) ; listCalendars ( mpx ) ; |
public class CommonOps_DDRM { /** * < p > Performs the an element by element multiplication operation : < br >
* < br >
* a < sub > ij < / sub > = a < sub > ij < / sub > * b < sub > ij < / sub > < br >
* @ param a The left matrix in the multiplication operation . Modified .
* @ param b The right matrix in the multiplication operation . Not modified . */
public static void elementMult ( DMatrixD1 a , DMatrixD1 b ) { } } | if ( a . numCols != b . numCols || a . numRows != b . numRows ) { throw new MatrixDimensionException ( "The 'a' and 'b' matrices do not have compatible dimensions" ) ; } int length = a . getNumElements ( ) ; for ( int i = 0 ; i < length ; i ++ ) { a . times ( i , b . get ( i ) ) ; } |
public class ParsedUnionStmt { /** * Miscellaneous post parse activity
* @ param sql
* @ param joinOrder */
@ Override void postParse ( String sql , String joinOrder ) { } } | for ( AbstractParsedStmt selectStmt : m_children ) { selectStmt . postParse ( sql , joinOrder ) ; } m_sql = sql ; m_joinOrder = joinOrder ; |
public class CopyOnWriteArrayList { /** * Returns a { @ link ListIterator } that iterates over the elements of this
* list as they were at the time of this method call . Changes to the list
* made after this method call will not be reflected by the iterator , nor
* will they trigger a { @ link ConcurrentModificationException } .
* < p > The returned iterator does not support { @ link ListIterator # add } ,
* { @ link ListIterator # set } or { @ link Iterator # remove ( ) } , */
public ListIterator < E > listIterator ( int index ) { } } | Object [ ] snapshot = elements ; if ( index < 0 || index > snapshot . length ) { throw new IndexOutOfBoundsException ( "index=" + index + ", length=" + snapshot . length ) ; } CowIterator < E > result = new CowIterator < E > ( snapshot , 0 , snapshot . length ) ; result . index = index ; return result ; |
public class JMultiFieldPanel { /** * Set the value .
* @ param objValue The raw - date value of this component . */
public void setControlValue ( Object objValue ) { } } | m_converter . setData ( objValue , false , Constants . SCREEN_MOVE ) ; // DO NOT ! Display ( will loop )
for ( int i = this . getComponentCount ( ) - 1 ; i >= 0 ; i -- ) { JComponent component = ( JComponent ) this . getComponent ( i ) ; if ( ( i >= brgComponentsLinkedToConverter . length ) || ( brgComponentsLinkedToConverter [ i ] ) ) this . setControlValue ( objValue , component ) ; } |
public class AmazonCloudDirectoryClient { /** * Performs all the write operations in a batch . Either all the operations succeed or none .
* @ param batchWriteRequest
* @ return Result of the BatchWrite operation returned by the service .
* @ throws InternalServiceException
* Indicates a problem that must be resolved by Amazon Web Services . This might be a transient error in
* which case you can retry your request until it succeeds . Otherwise , go to the < a
* href = " http : / / status . aws . amazon . com / " > AWS Service Health Dashboard < / a > site to see if there are any
* operational issues with the service .
* @ throws InvalidArnException
* Indicates that the provided ARN value is not valid .
* @ throws RetryableConflictException
* Occurs when a conflict with a previous successful write is detected . For example , if a write operation
* occurs on an object and then an attempt is made to read the object using “ SERIALIZABLE ” consistency , this
* exception may result . This generally occurs when the previous write did not have time to propagate to the
* host serving the current request . A retry ( with appropriate backoff logic ) is the recommended response to
* this exception .
* @ throws ValidationException
* Indicates that your request is malformed in some manner . See the exception message .
* @ throws LimitExceededException
* Indicates that limits are exceeded . See < a
* href = " https : / / docs . aws . amazon . com / clouddirectory / latest / developerguide / limits . html " > Limits < / a > for more
* information .
* @ throws AccessDeniedException
* Access denied . Check your permissions .
* @ throws DirectoryNotEnabledException
* Operations are only permitted on enabled directories .
* @ throws BatchWriteException
* A < code > BatchWrite < / code > exception has occurred .
* @ sample AmazonCloudDirectory . BatchWrite
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / clouddirectory - 2017-01-11 / BatchWrite " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public BatchWriteResult batchWrite ( BatchWriteRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeBatchWrite ( request ) ; |
public class ZipFileSliceReader { /** * Get the 2GB chunk of the zipfile with the given chunk index .
* @ param chunkIdx
* the chunk index
* @ return the chunk
* @ throws IOException
* if an I / O exception occurs .
* @ throws InterruptedException
* if the thread was interrupted . */
private ByteBuffer getChunk ( final int chunkIdx ) throws IOException , InterruptedException { } } | ByteBuffer chunk = chunkCache [ chunkIdx ] ; if ( chunk == null ) { final ByteBuffer byteBufferDup = zipFileSlice . physicalZipFile . getByteBuffer ( chunkIdx ) . duplicate ( ) ; chunk = chunkCache [ chunkIdx ] = byteBufferDup ; } return chunk ; |
public class Cache2kBuilder { /** * Create a builder from the configuration . */
public static < K , T > Cache2kBuilder < K , T > of ( Cache2kConfiguration < K , T > c ) { } } | Cache2kBuilder < K , T > cb = new Cache2kBuilder < K , T > ( c ) ; return cb ; |
public class NumberFormatterBase { /** * Format a sequence of unit values . */
public void formatUnits ( List < UnitValue > values , StringBuilder destination , UnitFormatOptions options ) { } } | int size = values . size ( ) ; for ( int i = 0 ; i < size ; i ++ ) { if ( i > 0 ) { destination . append ( ' ' ) ; } formatUnit ( values . get ( i ) , destination , options ) ; } |
public class Matrix3fStack { /** * Increment the stack pointer by one and set the values of the new current matrix to the one directly below it .
* @ return this */
public Matrix3fStack pushMatrix ( ) { } } | if ( curr == mats . length ) { throw new IllegalStateException ( "max stack size of " + ( curr + 1 ) + " reached" ) ; // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $
} mats [ curr ++ ] . set ( this ) ; return this ; |
public class JDBCConnection { /** * Adds another SQLWarning to this Connection object ' s warning chain .
* @ param w the SQLWarning to add to the chain */
void addWarning ( SQLWarning w ) { } } | // PRE : w is never null
synchronized ( rootWarning_mutex ) { if ( rootWarning == null ) { rootWarning = w ; } else { rootWarning . setNextWarning ( w ) ; } } |
public class ApiOvhCloud { /** * Get failover ip
* REST : GET / cloud / project / { serviceName } / ip / failover / { id }
* @ param id [ required ] Ip id
* @ param serviceName [ required ] Project id */
public OvhFailoverIp project_serviceName_ip_failover_id_GET ( String serviceName , String id ) throws IOException { } } | String qPath = "/cloud/project/{serviceName}/ip/failover/{id}" ; StringBuilder sb = path ( qPath , serviceName , id ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhFailoverIp . class ) ; |
public class FlowController { /** * Invoke the given action handler method , passing it an argument if appropriate .
* @ param method the action handler method to invoke .
* @ param arg the form - bean to pass ; may be < code > null < / code > .
* @ return the ActionForward returned by the action handler method .
* @ throws Exception if an Exception was raised in user code . */
protected ActionForward invokeActionMethod ( Method method , Object arg ) throws Exception { } } | return invokeActionMethod ( method , arg , getRequest ( ) , getActionMapping ( ) ) ; |
public class TimeColumn { /** * Returns the smallest ( " bottom " ) n values in the column , Does not change the order in this column
* @ param n The maximum number of records to return . The actual number will be smaller if n is greater than the
* number of observations in the column
* @ return A list , possibly empty , of the smallest n observations */
public List < LocalTime > bottom ( int n ) { } } | List < LocalTime > bottom = new ArrayList < > ( ) ; int [ ] values = data . toIntArray ( ) ; IntArrays . parallelQuickSort ( values ) ; int rowCount = 0 ; int validCount = 0 ; while ( validCount < n && rowCount < size ( ) ) { int value = values [ rowCount ] ; if ( value != TimeColumnType . missingValueIndicator ( ) ) { bottom . add ( PackedLocalTime . asLocalTime ( value ) ) ; validCount ++ ; } rowCount ++ ; } return bottom ; |
public class PreCompileMojo {
    /**
     * Execute goal: runs the configured {@code mainClass}'s {@code main(String[])}
     * in an isolated thread group with the plugin's class loader, passing the
     * output directories and filter package as arguments.
     *
     * @throws MojoExecutionException execution of the main class or one of the
     *         threads it generated failed.
     * @throws MojoFailureException something bad happened...
     */
    public void execute() throws MojoExecutionException, MojoFailureException {
        // Honor the skip flag before doing any work.
        if (isSkip()) {
            getLog().info("skipping execute as per configuraion");
            return;
        }
        if (killAfter != -1) {
            getLog().warn("Warning: killAfter is now deprecated. Do you need it ? Please comment on MEXEC-6.");
        }
        // Arguments handed to the precompiler's main(): parent dir, output dir, package filter.
        arguments = new String[] { outputParentDirectory.getAbsolutePath(), outputDirectory.getAbsolutePath(), filterClassPackage };
        if (getLog().isDebugEnabled()) {
            StringBuffer msg = new StringBuffer("Invoking : ");
            msg.append(mainClass);
            msg.append(".main(");
            for (int i = 0; i < arguments.length; i++) {
                if (i > 0) {
                    msg.append(", ");
                }
                msg.append(arguments[i]);
            }
            msg.append(")");
            getLog().debug(msg);
        }
        final Log log = getLog();
        // Run main() in an isolated thread group so uncaught exceptions can be collected.
        IsolatedThreadGroup threadGroup = new IsolatedThreadGroup(mainClass /* name */);
        Thread bootstrapThread = new Thread(threadGroup, new Runnable() {
            public void run() {
                long current = System.currentTimeMillis();
                try {
                    // Resolve main(String[]) reflectively via the context class loader.
                    Method main = Thread.currentThread().getContextClassLoader().loadClass(mainClass).getMethod("main", new Class[] { String[].class });
                    if (!main.isAccessible()) {
                        getLog().debug("Setting accessibility to true in order to invoke main().");
                        main.setAccessible(true);
                    }
                    if (!Modifier.isStatic(main.getModifiers())) {
                        throw new MojoExecutionException("Can't call main(String[])-method because it is not static.");
                    }
                    main.invoke(null, new Object[] { arguments });
                } catch (NoSuchMethodException e) {
                    // just pass it on
                    Thread.currentThread().getThreadGroup().uncaughtException(Thread.currentThread(),
                            new Exception("The specified mainClass doesn't contain a main method with appropriate signature.", e));
                } catch (Exception e) {
                    // just pass it on
                    Thread.currentThread().getThreadGroup().uncaughtException(Thread.currentThread(), e);
                } finally {
                    log.info("JProtobuf pre compile done time took: " + (System.currentTimeMillis() - current) + "ms");
                }
            }
        }, mainClass + ".main()");
        bootstrapThread.setContextClassLoader(getClassLoader());
        setSystemProperties();
        bootstrapThread.start();
        // Wait for all non-daemon threads spawned by the invoked main() to finish.
        joinNonDaemonThreads(threadGroup);
        // It's plausible that spontaneously a non-daemon thread might be created as we try and shut down,
        // but it's too late since the termination condition (only daemon threads) has been triggered.
        if (keepAlive) {
            getLog().warn("Warning: keepAlive is now deprecated and obsolete. Do you need it? Please comment on MEXEC-6.");
            waitFor(0);
        }
        if (cleanupDaemonThreads) {
            terminateThreads(threadGroup);
            try {
                threadGroup.destroy();
            } catch (IllegalThreadStateException e) {
                getLog().warn("Couldn't destroy threadgroup " + threadGroup, e);
            }
        }
        // Restore any system properties that setSystemProperties() overrode.
        if (originalSystemProperties != null) {
            System.setProperties(originalSystemProperties);
        }
        // Surface any exception collected by the isolated thread group as a mojo failure.
        synchronized (threadGroup) {
            if (threadGroup.uncaughtException != null) {
                throw new MojoExecutionException("An exception occured while executing the Java class. " + threadGroup.uncaughtException.getMessage(), threadGroup.uncaughtException);
            }
        }
        registerSourceRoots();
    }
}
public class ListSSHPublicKeysResult { /** * A list of the SSH public keys assigned to IAM user .
* @ return A list of the SSH public keys assigned to IAM user . */
public java . util . List < SSHPublicKeyMetadata > getSSHPublicKeys ( ) { } } | if ( sSHPublicKeys == null ) { sSHPublicKeys = new com . amazonaws . internal . SdkInternalList < SSHPublicKeyMetadata > ( ) ; } return sSHPublicKeys ; |
public class TLSUtils { /** * Disable the hostname verification of TLS certificates .
* < b > Warning : < / b > Use with care . This disables hostname verification of TLS certificates and essentially
* < b > invalidates all security guarantees provided by TLS < / b > . Only use this method if you understand the
* implications .
* @ param builder a connection configuration builder .
* @ param < B > Type of the ConnectionConfiguration builder .
* @ return the given builder . */
public static < B extends ConnectionConfiguration . Builder < B , ? > > B disableHostnameVerificationForTlsCertificates ( B builder ) { } } | builder . setHostnameVerifier ( DOES_NOT_VERIFY_VERIFIER ) ; return builder ; |
public class HandshakeReader { /** * Read the response body */
private byte [ ] readBody ( Map < String , List < String > > headers , WebSocketInputStream input ) { } } | // Get the value of " Content - Length " header .
int length = getContentLength ( headers ) ; if ( length <= 0 ) { // Response body is not available .
return null ; } try { // Allocate a byte array of the content length .
byte [ ] body = new byte [ length ] ; // Read the response body into the byte array .
input . readBytes ( body , length ) ; // Return the content of the response body .
return body ; } catch ( Throwable t ) { // Response body is not available .
return null ; } |
public class Emitter { /** * Transforms lines into HTML .
* @ param out
* The StringBuilder to write to .
* @ param block
* The Block to process . */
private void emitLines ( final StringBuilder out , final Block block ) { } } | switch ( block . type ) { case CODE : this . emitCodeLines ( out , block . lines , block . meta , true ) ; break ; case FENCED_CODE : this . emitCodeLines ( out , block . lines , block . meta , false ) ; break ; case XML : this . emitRawLines ( out , block . lines ) ; break ; default : this . emitMarkedLines ( out , block . lines ) ; break ; } |
public class Or { /** * Returns { @ code true } if at least on filter returns { @ code true } .
* @ param entity the entity to filter .
* @ return { @ code true } at least one filter returns { @ code true } . */
@ Override public boolean filter ( Object entity ) { } } | for ( Filter filter : filters ) { if ( filter . filter ( entity ) ) { return true ; } } return false ; |
public class TernaryTree { /** * Recursively insert the median first and then the median of the
* lower and upper halves , and so on in order to get a balanced
* tree . The array of keys is assumed to be sorted in ascending
* order . */
protected void insertBalanced ( String [ ] k , char [ ] v , int offset , int n ) { } } | int m ; if ( n < 1 ) { return ; } m = n >> 1 ; insert ( k [ m + offset ] , v [ m + offset ] ) ; insertBalanced ( k , v , offset , m ) ; insertBalanced ( k , v , offset + m + 1 , n - m - 1 ) ; |
public class CommerceNotificationTemplatePersistenceImpl { /** * Returns a range of all the commerce notification templates where groupId = & # 63 ; and enabled = & # 63 ; .
* Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CommerceNotificationTemplateModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order .
* @ param groupId the group ID
* @ param enabled the enabled
* @ param start the lower bound of the range of commerce notification templates
* @ param end the upper bound of the range of commerce notification templates ( not inclusive )
* @ return the range of matching commerce notification templates */
@ Override public List < CommerceNotificationTemplate > findByG_E ( long groupId , boolean enabled , int start , int end ) { } } | return findByG_E ( groupId , enabled , start , end , null ) ; |
public class SsdpClientImpl { /** * Handle discovery response Datagrams .
* @ param response the incoming response */
private void handleDiscoveryResponse ( SsdpResponse response ) { } } | SsdpService ssdpService = response . toService ( ) ; if ( ssdpService . getSerialNumber ( ) == null ) { callback . onFailed ( new NoSerialNumberException ( ) ) ; return ; } if ( ! cache . containsKey ( ssdpService . getSerialNumber ( ) ) ) { callback . onServiceDiscovered ( ssdpService ) ; } cache . put ( ssdpService . getSerialNumber ( ) , ssdpService ) ; |
public class SimpleMonthView { /** * Attempts to restore accessibility focus to the specified date .
* @ param day The date which should receive focus
* @ return { @ code false } if the date is not valid for this month view , or
* { @ code true } if the date received focus */
public boolean restoreAccessibilityFocus ( CalendarDay day ) { } } | if ( ( day . year != mYear ) || ( day . month != mMonth ) || ( day . day > mNumCells ) ) { return false ; } mNodeProvider . setFocusedItem ( day ) ; return true ; |
public class SingleEvaluatedMoveCache { /** * Clear all cached values . */
@ Override public final void clear ( ) { } } | evaluatedMove = null ; evaluation = null ; validatedMove = null ; validation = null ; |
public class TileSparklineSkin {
    /**
     * Smoothing: redraws the spark line as a chain of cubic Bezier segments
     * fitted through the data points, and repositions the end dot.
     *
     * @param DATA_LIST the data values to plot
     */
    public void smooth(final List<Double> DATA_LIST) {
        int size = DATA_LIST.size();
        double[] x = new double[size];
        double[] y = new double[size];
        // Determine the value range; if all values are equal, fall back to the
        // configured min/max so the scale factor stays finite.
        low = Statistics.getMin(DATA_LIST);
        high = Statistics.getMax(DATA_LIST);
        if (Double.compare(low, high) == 0) {
            low = minValue;
            high = maxValue;
        }
        range = high - low;
        // Pixel bounds of the plotting area.
        double minX = graphBounds.getX();
        double maxX = minX + graphBounds.getWidth();
        double minY = graphBounds.getY();
        double maxY = minY + graphBounds.getHeight();
        // Horizontal spacing per point and vertical scale per value unit.
        double stepX = graphBounds.getWidth() / (noOfDatapoints - 1);
        double stepY = graphBounds.getHeight() / range;
        // Map each data value to screen coordinates (y axis inverted).
        for (int i = 0; i < size; i++) {
            x[i] = minX + i * stepX;
            y[i] = maxY - Math.abs(low - DATA_LIST.get(i)) * stepY;
        }
        // Bezier control points for the x and y coordinate sequences.
        Pair<Double[], Double[]> px = computeControlPoints(x);
        Pair<Double[], Double[]> py = computeControlPoints(y);
        sparkLine.getElements().clear();
        // One cubic segment between each consecutive pair of points.
        for (int i = 0; i < size - 1; i++) {
            sparkLine.getElements().add(new MoveTo(x[i], y[i]));
            sparkLine.getElements().add(new CubicCurveTo(px.getKey()[i], py.getKey()[i], px.getValue()[i], py.getValue()[i], x[i + 1], y[i + 1]));
        }
        // Place the indicator dot on the last data point.
        dot.setCenterX(maxX);
        dot.setCenterY(y[size - 1]);
    }
}
public class SqlManagerImpl { /** * { @ inheritDoc } */
public < T > T getSingleResult ( Class < T > clazz , SqlResource resource ) { } } | return getSingleResult ( clazz , resource , null ) ; |
public class GroupDeviceElement { /** * command _ inout _ i - access limited to package Group */
@ Override int command_inout_asynch_i ( final String c , final boolean fgt , final boolean fwd , final int rid ) throws DevFailed { } } | try { final int actual_rid = proxy . command_inout_asynch ( c , fgt ) ; if ( fgt == false ) { arp . put ( new Integer ( rid ) , new AsynchRequest ( actual_rid , c ) ) ; } } catch ( final DevFailed df ) { if ( fgt == false ) { arp . put ( new Integer ( rid ) , new AsynchRequest ( - 1 , c , df ) ) ; } } catch ( final Exception e ) { if ( fgt == false ) { final DevError [ ] errors = new DevError [ 1 ] ; errors [ 0 ] = new DevError ( ) ; errors [ 0 ] . severity = ErrSeverity . ERR ; errors [ 0 ] . reason = "unknown exception caught" ; errors [ 0 ] . desc = "unknown error" ; errors [ 0 ] . origin = "GroupDeviceElemnt.command_inout" ; final DevFailed ex = new DevFailed ( errors ) ; arp . put ( new Integer ( rid ) , new AsynchRequest ( - 1 , c , ex ) ) ; } } return rid ; |
public class PortletResourceResponseContextImpl { /** * Handles resource response specific headers . Returns true if the header was consumed by this
* method and requires no further processing
* @ return */
protected boolean handleResourceHeader ( String key , String value ) { } } | if ( ResourceResponse . HTTP_STATUS_CODE . equals ( key ) ) { this . portletResourceOutputHandler . setStatus ( Integer . parseInt ( value ) ) ; return true ; } if ( "Content-Type" . equals ( key ) ) { final ContentType contentType = ContentType . parse ( value ) ; final Charset charset = contentType . getCharset ( ) ; if ( charset != null ) { this . portletResourceOutputHandler . setCharacterEncoding ( charset . name ( ) ) ; } this . portletResourceOutputHandler . setContentType ( contentType . getMimeType ( ) ) ; return true ; } if ( "Content-Length" . equals ( key ) ) { this . portletResourceOutputHandler . setContentLength ( Integer . parseInt ( value ) ) ; return true ; } if ( "Content-Language" . equals ( key ) ) { final HeaderElement [ ] parts = BasicHeaderValueParser . parseElements ( value , null ) ; if ( parts . length > 0 ) { final String localeStr = parts [ 0 ] . getValue ( ) ; final Locale locale = LocaleUtils . toLocale ( localeStr ) ; this . portletResourceOutputHandler . setLocale ( locale ) ; return true ; } } return false ; |
public class InternalXbaseWithAnnotationsLexer {
    /**
     * $ANTLR start "T__42"
     * Generated lexer rule: matches the two-character token '--' and assigns
     * it token type T__42 on the default channel.
     */
    public final void mT__42() throws RecognitionException {
        try {
            int _type = T__42;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // InternalXbaseWithAnnotations.g:40:7: ( '--' )
            // InternalXbaseWithAnnotations.g:40:9: '--'
            {
                match("--");
            }
            // Publish the matched token's type and channel to the lexer state.
            state.type = _type;
            state.channel = _channel;
        } finally {
        }
    }
}
public class VerificationConditionGenerator { /** * Translate a fail statement . Execution should never reach such a statement .
* Hence , we need to emit a verification condition to ensure this is the case .
* @ param stmt
* @ param context
* @ return */
private Context translateFail ( WyilFile . Stmt . Fail stmt , Context context ) { } } | Expr condition = new Expr . Constant ( new Value . Bool ( false ) ) ; VerificationCondition verificationCondition = new VerificationCondition ( "possible panic" , context . assumptions , condition , stmt . getParent ( WyilFile . Attribute . Span . class ) ) ; context . emit ( verificationCondition ) ; return null ; |
public class Matcher { /** * Gets a list of the matches in the order in which they occur
* in a matching input string
* @ return the matches */
public List < String > orderedGroups ( ) { } } | int groupCount = groupCount ( ) ; List < String > groups = new ArrayList < String > ( groupCount ) ; for ( int i = 1 ; i <= groupCount ; i ++ ) { groups . add ( group ( i ) ) ; } return groups ; |
public class SqlQueryStatement { /** * Get the FieldDescriptors of the extent based on the FieldDescriptors of the parent . */
private FieldDescriptor [ ] getExtentFieldDescriptors ( TableAlias extAlias , FieldDescriptor [ ] fds ) { } } | FieldDescriptor [ ] result = new FieldDescriptor [ fds . length ] ; for ( int i = 0 ; i < fds . length ; i ++ ) { result [ i ] = extAlias . cld . getFieldDescriptorByName ( fds [ i ] . getAttributeName ( ) ) ; } return result ; |
public class GwtMockito { /** * Returns a new fake object of the given type assuming a fake provider is
* available for that type . Additional fake providers can be registered via
* { @ link # useProviderForType } .
* @ param type type to get a fake object for
* @ return a fake of the given type , as returned by an applicable provider
* @ throws IllegalArgumentException if no provider for the given type ( or one
* of its superclasses ) has been registered */
public static < T > T getFake ( Class < T > type ) { } } | // If initMocks hasn ' t been called , read from the default fake provider map . This allows static
// fields to be initialized with fakes in tests that don ' t use the GwtMockito test runner .
T fake = getFakeFromProviderMap ( type , bridge != null ? bridge . registeredProviders : DEFAULT_FAKE_PROVIDERS ) ; if ( fake == null ) { throw new IllegalArgumentException ( "No fake provider has been registered " + "for " + type . getSimpleName ( ) + ". Call useProviderForType to " + "register a provider before calling getFake." ) ; } return fake ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.