signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class AbstractTreeNode { /** * Fire the event for the removed node child .
* @ param event the event . */
void firePropertyChildRemoved ( TreeNodeRemovedEvent event ) { } }
|
if ( this . nodeListeners != null ) { for ( final TreeNodeListener listener : this . nodeListeners ) { if ( listener != null ) { listener . treeNodeChildRemoved ( event ) ; } } } final N parentNode = getParentNode ( ) ; if ( parentNode != null ) { parentNode . firePropertyChildRemoved ( event ) ; }
|
public class AnalyticsServiceElasticsearch { /** * This method builds a map of communication summary stats related to the supplied
* criteria .
* @ param stats The map of communication summary stats
* @ param index The index
* @ param criteria The criteria
* @ param addMetrics Whether to add metrics on the nodes / links */
private void buildCommunicationSummaryStatistics ( Map < String , CommunicationSummaryStatistics > stats , String index , Criteria criteria , boolean addMetrics ) { } }
|
if ( ! refresh ( index ) ) { return ; } // Don ' t specify target class , so that query provided that can be used with
// CommunicationDetails and CompletionTime
BoolQueryBuilder query = buildQuery ( criteria , ElasticsearchUtil . TRANSACTION_FIELD , null ) ; // Only want external communications
query = query . mustNot ( QueryBuilders . matchQuery ( "internal" , "true" ) ) ; StatsBuilder latencyBuilder = AggregationBuilders . stats ( "latency" ) . field ( ElasticsearchUtil . LATENCY_FIELD ) ; TermsBuilder targetBuilder = AggregationBuilders . terms ( "target" ) . field ( ElasticsearchUtil . TARGET_FIELD ) . size ( criteria . getMaxResponseSize ( ) ) . subAggregation ( latencyBuilder ) ; TermsBuilder sourceBuilder = AggregationBuilders . terms ( "source" ) . field ( ElasticsearchUtil . SOURCE_FIELD ) . size ( criteria . getMaxResponseSize ( ) ) . subAggregation ( targetBuilder ) ; SearchRequestBuilder request = getBaseSearchRequestBuilder ( COMMUNICATION_DETAILS_TYPE , index , criteria , query , 0 ) . addAggregation ( sourceBuilder ) ; SearchResponse response = getSearchResponse ( request ) ; for ( Terms . Bucket sourceBucket : response . getAggregations ( ) . < Terms > get ( "source" ) . getBuckets ( ) ) { Terms targets = sourceBucket . getAggregations ( ) . get ( "target" ) ; CommunicationSummaryStatistics css = stats . get ( sourceBucket . getKey ( ) ) ; if ( css == null ) { css = new CommunicationSummaryStatistics ( ) ; css . setId ( sourceBucket . getKey ( ) ) ; css . setUri ( EndpointUtil . decodeEndpointURI ( css . getId ( ) ) ) ; css . setOperation ( EndpointUtil . decodeEndpointOperation ( css . getId ( ) , true ) ) ; stats . put ( css . getId ( ) , css ) ; } if ( addMetrics ) { css . setCount ( sourceBucket . getDocCount ( ) ) ; } for ( Terms . Bucket targetBucket : targets . getBuckets ( ) ) { Stats latency = targetBucket . getAggregations ( ) . get ( "latency" ) ; String linkId = targetBucket . getKey ( ) ; ConnectionStatistics con = css . getOutbound ( ) . get ( linkId ) ; if ( con == null ) { con = new ConnectionStatistics ( ) ; css . getOutbound ( ) . put ( linkId , con ) ; } if ( addMetrics ) { con . setMinimumLatency ( ( long ) latency . getMin ( ) ) ; con . setAverageLatency ( ( long ) latency . getAvg ( ) ) ; con . 
setMaximumLatency ( ( long ) latency . getMax ( ) ) ; con . setCount ( targetBucket . getDocCount ( ) ) ; } } } addNodeInformation ( stats , index , criteria , addMetrics , false ) ; addNodeInformation ( stats , index , criteria , addMetrics , true ) ;
|
public class UserInstanceManagerImpl { /** * Returns the UserInstance object that is associated with the given request .
* @ param request Incoming HttpServletRequest
* @ return UserInstance object associated with the given request */
@ Override public IUserInstance getUserInstance ( HttpServletRequest request ) throws PortalException { } }
|
try { request = this . portalRequestUtils . getOriginalPortalRequest ( request ) ; } catch ( IllegalArgumentException iae ) { // ignore , just means that this isn ' t a wrapped request
} // Use request attributes first for the fastest possible retrieval
IUserInstance userInstance = ( IUserInstance ) request . getAttribute ( KEY ) ; if ( userInstance != null ) { return userInstance ; } final IPerson person ; try { // Retrieve the person object that is associated with the request
person = this . personManager . getPerson ( request ) ; } catch ( Exception e ) { logger . error ( "Exception while retrieving IPerson!" , e ) ; throw new PortalSecurityException ( "Could not retrieve IPerson" , e ) ; } if ( person == null ) { throw new PortalSecurityException ( "PersonManager returned null person for this request. With no user, there's no UserInstance. Is PersonManager misconfigured? RDBMS access misconfigured?" ) ; } final HttpSession session = request . getSession ( ) ; if ( session == null ) { throw new IllegalStateException ( "HttpServletRequest.getSession() returned a null session for request: " + request ) ; } // Return the UserInstance object if it ' s in the session
UserInstanceHolder userInstanceHolder = getUserInstanceHolder ( session ) ; if ( userInstanceHolder != null ) { userInstance = userInstanceHolder . getUserInstance ( ) ; if ( userInstance != null ) { return userInstance ; } } // Create either a UserInstance or a GuestUserInstance
final LocaleManager localeManager = this . getLocaleManager ( request , person ) ; final String userAgent = this . getUserAgent ( request ) ; final IUserProfile userProfile = this . getUserProfile ( request , person , localeManager , userAgent ) ; // Create the user layout manager and user instance object
IUserLayoutManager userLayoutManager = userLayoutManagerFactory . getUserLayoutManager ( person , userProfile ) ; final UserPreferencesManager userPreferencesManager = new UserPreferencesManager ( person , userProfile , userLayoutManager ) ; userInstance = new UserInstance ( person , userPreferencesManager , localeManager ) ; // Ensure the newly created UserInstance is cached in the session
if ( userInstanceHolder == null ) { userInstanceHolder = new UserInstanceHolder ( ) ; } userInstanceHolder . setUserInstance ( userInstance ) ; session . setAttribute ( KEY , userInstanceHolder ) ; request . setAttribute ( KEY , userInstance ) ; // Return the new UserInstance
return userInstance ;
|
public class JDK8TriggerBuilder { /** * Add the given key - value pair to the Trigger ' s { @ link JobDataMap } .
* @ param dataKey
* Job data key .
* @ param value
* Job data value
* @ return the updated JDK8TriggerBuilder
* @ see ITrigger # getJobDataMap ( ) */
@ Nonnull public JDK8TriggerBuilder < T > usingJobData ( final String dataKey , final Object value ) { } }
|
m_aJobDataMap . put ( dataKey , value ) ; return this ;
|
public class ScatterChartPanel { /** * Sets the operation mode .
* In zero - based mode coordinate axes always start at 0 and end near the maximum value of the corresponding dimension .
* Otherwise axes start at the minimum value .
* @ param zeroBased Operation mode */
public void setZeroBased ( boolean zeroBased ) { } }
|
if ( zeroBased != this . zeroBased ) { this . zeroBased = zeroBased ; for ( ValueDimension dim : tickInfo . keySet ( ) ) tickInfo . get ( dim ) . setZeroBased ( zeroBased ) ; }
|
public class InteropFramework { /** * Top level entry point of this class , when called from the command line .
* See method { @ link CommandLineArguments # main ( String [ ] ) } */
public int run ( ) { } }
|
if ( outfile == null && compare == null ) return CommandLineArguments . STATUS_NO_OUTPUT_OR_COMPARISON ; if ( infile == null && generator == null && merge == null ) return CommandLineArguments . STATUS_NO_INPUT ; if ( ( infile == "-" ) && ( bindings == "-" ) ) throw new InteropException ( "Cannot use standard input for both infile and bindings" ) ; Document doc ; if ( infile != null ) { doc = doReadDocument ( infile , informat ) ; } else if ( merge != null ) { IndexedDocument iDoc = new IndexedDocument ( pFactory , pFactory . newDocument ( ) , flatten != null ) ; try { List < ToRead > files ; if ( merge . equals ( "-" ) ) { files = readIndexFile ( System . in ) ; } else { files = readIndexFile ( new File ( merge ) ) ; } System . err . println ( "files to merge " + files ) ; for ( ToRead something : files ) { iDoc . merge ( readDocument ( something ) ) ; } } catch ( IOException e ) { System . err . println ( "problem reading index file" ) ; e . printStackTrace ( ) ; } doc = iDoc . toDocument ( ) ; } else { String [ ] options = generator . split ( ":" ) ; String noOfNodes = getOption ( options , 0 ) ; String noOfEdges = getOption ( options , 1 ) ; String firstNode = getOption ( options , 2 ) ; String namespace = "http://expample.org/" ; String seed = getOption ( options , 3 ) ; String term = getOption ( options , 4 ) ; if ( term == null ) term = "e1" ; GeneratorDetails gd = new GeneratorDetails ( Integer . valueOf ( noOfNodes ) , Integer . valueOf ( noOfEdges ) , firstNode , namespace , ( seed == null ) ? null : Long . valueOf ( seed ) , term ) ; System . err . println ( gd ) ; GraphGenerator gg = new GraphGenerator ( gd , pFactory ) ; gg . generateElements ( ) ; doc = gg . getDetails ( ) . getDocument ( ) ; } if ( compare != null ) { return doCompare ( doc , doReadDocument ( compare , informat ) ) ; } if ( template != null ) { BindingsBeanGenerator bbgen = new BindingsBeanGenerator ( pFactory ) ; boolean val = bbgen . 
generate ( doc , template , packge , outfile , location ) ; return ( val ) ? 0 : CommandLineArguments . STATUS_BEAN_GENERATION ; } if ( index != null ) { Document indexedDoc = new IndexedDocument ( pFactory , doc , ( flatten != null ) ) . toDocument ( ) ; doc = indexedDoc ; } if ( bindings != null ) { Expand myExpand = new Expand ( pFactory , addOrderp , allExpanded ) ; Document expanded ; System . err . println ( "bindings version is " + bindingsVersion ) ; if ( bindingsVersion == 3 ) { Bindings bb = BindingsJson . fromBean ( BindingsJson . importBean ( new File ( bindings ) ) , pFactory ) ; expanded = myExpand . expander ( doc , bb ) ; } else { Document docBindings = ( Document ) doReadDocument ( bindings , bindingformat ) ; expanded = myExpand . expander ( doc , outfile , docBindings ) ; } boolean flag = myExpand . getAllExpanded ( ) ; doWriteDocument ( outfile , outformat , expanded ) ; if ( ! flag ) { return CommandLineArguments . STATUS_TEMPLATE_UNBOUND_VARIABLE ; } } else { doWriteDocument ( outfile , outformat , doc ) ; } return CommandLineArguments . STATUS_OK ;
|
public class PersistentFactory { /** * Creates the specified object with the data provided in the specified state under
* the governance of the specified transaction .
* @ param xaction the transaction governing this event
* @ param state the new state for the new object
* @ throws PersistenceException an error occurred talking to the data store , or
* creates are not supported */
public T create ( Transaction xaction , Map < String , Object > state ) throws PersistenceException { } }
|
if ( create == null ) { synchronized ( this ) { while ( create == null ) { compileCreator ( ) ; try { wait ( 1000L ) ; } catch ( InterruptedException ignore ) { /* ignore this */
} } } } state . put ( "--key--" , getKey ( ) ) ; xaction . execute ( create , state ) ; if ( dependency != null ) { dependency . createDependencies ( xaction , state ) ; } return cache . find ( state ) ;
|
public class ValidationResult { /** * Adds a validation message to the result .
* @ param message a validation message to be added */
private void addMessage ( ValidationMessage < Origin > message ) { } }
|
if ( message == null ) { return ; } if ( null != defaultOrigin ) { message . addOrigin ( defaultOrigin ) ; } this . messages . add ( message ) ;
|
public class V1InstanceCreator { /** * Create a new ChangeSet with a name and reference .
* @ param name Initial name .
* @ param reference Reference value .
* @ return A newly minted ChangeSet that exists in the VersionOne system . */
public ChangeSet changeSet ( String name , String reference ) { } }
|
return changeSet ( name , reference , null ) ;
|
public class DynamoDBMapper { /** * Queries an Amazon DynamoDB table and returns the matching results as an
* unmodifiable list of instantiated objects . The table to query is
* determined by looking at the annotations on the specified class , which
* declares where to store the object data in Amazon DynamoDB , and the query
* expression parameter allows the caller to filter results and control how
* the query is executed .
* Callers should be aware that the returned list is unmodifiable , and any
* attempts to modify the list will result in an
* UnsupportedOperationException .
* The unmodifiable list returned is lazily loaded when possible , so calls
* to DynamoDB will be made only as needed .
* @ param < T >
* The type of the objects being returned .
* @ param clazz
* The class annotated with DynamoDB annotations describing how
* to store the object data in Amazon DynamoDB .
* @ param queryExpression
* Details on how to run the query , including any conditions on
* the key values
* @ param config
* The configuration to use for this query , which overrides the
* default provided at object construction .
* @ return An unmodifiable list of the objects constructed from the results
* of the query operation .
* @ see PaginatedQueryList */
public < T > PaginatedQueryList < T > query ( Class < T > clazz , DynamoDBQueryExpression queryExpression , DynamoDBMapperConfig config ) { } }
|
config = mergeConfig ( config ) ; QueryRequest queryRequest = createQueryRequestFromExpression ( clazz , queryExpression , config ) ; QueryResult queryResult = db . query ( applyUserAgent ( queryRequest ) ) ; return new PaginatedQueryList < T > ( this , clazz , db , queryRequest , queryResult ) ;
|
public class SarlBatchCompiler { /** * Replies if the given resource is a script .
* @ param resource the resource to test .
* @ return < code > true < / code > if the given resource is a script . */
@ SuppressWarnings ( "static-method" ) protected boolean isSourceFile ( Resource resource ) { } }
|
if ( resource instanceof BatchLinkableResource ) { return ! ( ( BatchLinkableResource ) resource ) . isLoadedFromStorage ( ) ; } return false ;
|
public class BuildDataHelper { /** * Calculate build data from downstream builds , that could be a shared library
* which is loaded first in a pipeline . For that reason , this method compares
* all remote URLs for each build data , with the real project name , to determine
* the proper build data . This way , the SHA returned in the build data will
* relate to the project
* @ param parentName name of the parent build
* @ param parentFullName full name of the parent build
* @ param buildDataList the list of build datas from a build run
* @ return the build data related to the project , null if not found */
public static BuildData calculateBuildData ( String parentName , String parentFullName , List < BuildData > buildDataList ) { } }
|
if ( buildDataList == null ) { return null ; } if ( buildDataList . size ( ) == 1 ) { return buildDataList . get ( 0 ) ; } String projectName = parentFullName . replace ( parentName , "" ) ; if ( projectName . endsWith ( "/" ) ) { projectName = projectName . substring ( 0 , projectName . lastIndexOf ( '/' ) ) ; } for ( BuildData buildData : buildDataList ) { Set < String > remoteUrls = buildData . getRemoteUrls ( ) ; for ( String remoteUrl : remoteUrls ) { if ( remoteUrl . contains ( projectName ) ) { return buildData ; } } } return null ;
|
public class CmsListMetadata { /** * Returns < code > true < / code > if any column definition contains a single action . < p >
* @ return < code > true < / code > if any column definition contains a single action */
public boolean hasSingleActions ( ) { } }
|
Iterator < CmsListColumnDefinition > itCols = m_columns . elementList ( ) . iterator ( ) ; while ( itCols . hasNext ( ) ) { CmsListColumnDefinition col = itCols . next ( ) ; if ( ! col . getDefaultActions ( ) . isEmpty ( ) || ! col . getDirectActions ( ) . isEmpty ( ) ) { return true ; } } return false ;
|
public class Table { /** * Adds an element if the element at the given index is null . Returns true if no element existed at the given index ,
* else returns false and doesn ' t set the element .
* @ param seqno
* @ param element
* @ return True if the element at the computed index was null , else false */
public boolean add ( long seqno , T element ) { } }
|
lock . lock ( ) ; try { return _add ( seqno , element , true , null ) ; } finally { lock . unlock ( ) ; }
|
public class ResultSummary { /** * CHECKSTYLE : CONSTANTS - OFF */
public static String createSummary ( final TasksResult result ) { } }
|
StringBuilder summary = new StringBuilder ( ) ; int tasks = result . getNumberOfAnnotations ( ) ; summary . append ( Messages . Tasks_ResultAction_Summary ( ) ) ; summary . append ( " " ) ; if ( tasks > 0 ) { summary . append ( "<a href=\"tasksResult\">" ) ; } if ( tasks == 1 ) { summary . append ( Messages . Tasks_ResultAction_OneWarning ( ) ) ; } else { summary . append ( Messages . Tasks_ResultAction_MultipleWarnings ( tasks ) ) ; } if ( tasks > 0 ) { summary . append ( "</a>" ) ; } summary . append ( " " ) ; if ( result . getNumberOfFiles ( ) > 1 ) { summary . append ( Messages . Tasks_ResultAction_MultipleFiles ( result . getNumberOfFiles ( ) ) ) ; } else { summary . append ( Messages . Tasks_ResultAction_OneFile ( ) ) ; } summary . append ( "." ) ; return summary . toString ( ) ;
|
public class PerformanceCache { /** * returns a cached performance object , null if not yet in the cache .
* @ param cvthe number of folds in the cross - validation
* @ param valuesthe point in the space
* @ returnthe cached performance item , null if not in cache */
public Performance get ( int cv , Point < Object > values ) { } }
|
return m_Cache . get ( getID ( cv , values ) ) ;
|
public class PeerEventServiceClient { /** * Peer eventing */
void peerVent ( TransactionContext transactionContext ) throws TransactionException { } }
|
logger . trace ( toString ( ) + "peerVent transaction: " + transactionContext ) ; final Envelope envelope ; try { Ab . SeekPosition . Builder start = Ab . SeekPosition . newBuilder ( ) ; if ( null != peerOptions . getNewest ( ) ) { start . setNewest ( Ab . SeekNewest . getDefaultInstance ( ) ) ; } else if ( peerOptions . getStartEvents ( ) != null ) { start . setSpecified ( Ab . SeekSpecified . newBuilder ( ) . setNumber ( peerOptions . getStartEvents ( ) ) ) ; } else { start . setNewest ( Ab . SeekNewest . getDefaultInstance ( ) ) ; } envelope = createSeekInfoEnvelope ( transactionContext , start . build ( ) , Ab . SeekPosition . newBuilder ( ) . setSpecified ( Ab . SeekSpecified . newBuilder ( ) . setNumber ( peerOptions . getStopEvents ( ) ) . build ( ) ) . build ( ) , SeekInfo . SeekBehavior . BLOCK_UNTIL_READY , clientTLSCertificateDigest ) ; connectEnvelope ( envelope ) ; } catch ( Exception e ) { throw new TransactionException ( toString ( ) + " error message: " + e . getMessage ( ) , e ) ; }
|
public class CPDefinitionSpecificationOptionValuePersistenceImpl { /** * Returns an ordered range of all the cp definition specification option values where CPDefinitionId = & # 63 ; and CPOptionCategoryId = & # 63 ; .
* Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CPDefinitionSpecificationOptionValueModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order .
* @ param CPDefinitionId the cp definition ID
* @ param CPOptionCategoryId the cp option category ID
* @ param start the lower bound of the range of cp definition specification option values
* @ param end the upper bound of the range of cp definition specification option values ( not inclusive )
* @ param orderByComparator the comparator to order the results by ( optionally < code > null < / code > )
* @ return the ordered range of matching cp definition specification option values */
@ Override public List < CPDefinitionSpecificationOptionValue > findByC_COC ( long CPDefinitionId , long CPOptionCategoryId , int start , int end , OrderByComparator < CPDefinitionSpecificationOptionValue > orderByComparator ) { } }
|
return findByC_COC ( CPDefinitionId , CPOptionCategoryId , start , end , orderByComparator , true ) ;
|
public class ConcurrentLinkedDeque { /** * Returns the predecessor of p , or the last node if p . prev has been
* linked to self , which will only be true if traversing with a
* stale pointer that is now off the list . */
final Node < E > pred ( Node < E > p ) { } }
|
Node < E > q = p . prev ; // j2objc : q = = p . prev = = sentinel means node GC - unlinked
return ( sentinel ( ) == q ) ? last ( ) : q ;
|
public class RequestCallbackInInterceptor { /** * This functions reads SAM flowId and sets it
* as message property for subsequent store in CallContext
* @ param message */
private static void setupFlowId ( SoapMessage message ) { } }
|
String flowId = FlowIdHelper . getFlowId ( message ) ; if ( flowId == null ) { flowId = FlowIdProtocolHeaderCodec . readFlowId ( message ) ; } if ( flowId == null ) { flowId = FlowIdSoapCodec . readFlowId ( message ) ; } if ( flowId == null ) { Exchange ex = message . getExchange ( ) ; if ( null != ex ) { Message reqMsg = ex . getOutMessage ( ) ; if ( null != reqMsg ) { flowId = FlowIdHelper . getFlowId ( reqMsg ) ; } } } if ( flowId != null && ! flowId . isEmpty ( ) ) { FlowIdHelper . setFlowId ( message , flowId ) ; }
|
public class NioSocketIOChannel { /** * Close the socket */
public void close ( ) { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . entry ( this , tc , "close" ) ; } // Doing a channel . close ( ) will actually put the keys associated with this
// channel
// into the associated selectors cancelledKey list , but they don ' t get
// removed until the
// next time the selector runs . In some cases , we need to make sure they are
// really gone before we
// we close the channel , because another thread may have requested another
// operation
// on the channel at the same time , which causes problems on some OS ' es .
// So , cancel them manually and wait for the selector to run once before
// continuing
// call super . close ( ) to see if we are using Regular Sockets , and if so , let
// the parent class handle closing the socket
super . close ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( this , tc , "SocketChannel closing, local: " + socket . getLocalSocketAddress ( ) + " remote: " + socket . getRemoteSocketAddress ( ) ) ; } // synchronize on this SocketIOChannel to prevent duplicate closes from
// being processed
synchronized ( this ) { if ( closed ) { processClose = false ; } closed = true ; } if ( processClose ) { // checkCancel is only on if we need to manually cancel keys as
// the socket close ( ) is supposed to do that for us
if ( checkCancel ) { // Remove socket from current selectors .
if ( channelSelectorRead != null ) { SelectionKey k = channelSelectorRead . getKey ( channel ) ; if ( k != null ) { CancelRequest cr = new CancelRequest ( k ) ; synchronized ( cr ) { channelSelectorRead . addCancelRequest ( cr ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( this , tc , "waiting for read key to be canceled, key is " + k ) ; } try { cr . wait ( ) ; } catch ( InterruptedException e ) { // No FFDC code needed
} } // end - sync
} } if ( channelSelectorWrite != null ) { SelectionKey k = channelSelectorWrite . getKey ( channel ) ; if ( k != null ) { CancelRequest cr = new CancelRequest ( k ) ; synchronized ( cr ) { channelSelectorWrite . addCancelRequest ( cr ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( this , tc , "waiting for write key to be canceled, key is " + k ) ; } try { cr . wait ( ) ; } catch ( InterruptedException e ) { // No FFDC code needed
} } // end - sync
} } } try { if ( channel != null ) { channel . close ( ) ; } // need to make sure the keys get cancelled after the close call
if ( channelSelectorRead != null ) { channelSelectorRead . wakeup ( ) ; channelSelectorRead = null ; } if ( channelSelectorWrite != null ) { channelSelectorWrite . wakeup ( ) ; channelSelectorWrite = null ; } } catch ( Throwable t ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( this , tc , "Error closing channel: " + t ) ; } // ignore all shutdown / close errors
} } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . exit ( this , tc , "close" ) ; }
|
public class ApiOvhTelephony { /** * Transfer security deposit between two billing accounts
* REST : POST / telephony / { billingAccount } / transferSecurityDeposit
* @ param amount [ required ] The amount , in euros , you want to transfer
* @ param billingAccountDestination [ required ] The destination billing account
* @ param billingAccount [ required ] The name of your billingAccount */
public void billingAccount_transferSecurityDeposit_POST ( String billingAccount , Long amount , String billingAccountDestination ) throws IOException { } }
|
String qPath = "/telephony/{billingAccount}/transferSecurityDeposit" ; StringBuilder sb = path ( qPath , billingAccount ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "amount" , amount ) ; addBody ( o , "billingAccountDestination" , billingAccountDestination ) ; exec ( qPath , "POST" , sb . toString ( ) , o ) ;
|
public class ReporterConfigMetadata { /** * This method will generate JSON string representation of the all items in current ReportConfigMetadata . */
public static String toJsonAsString ( ) { } }
|
logger . entering ( ) ; Gson gson = new GsonBuilder ( ) . setPrettyPrinting ( ) . create ( ) ; JsonObject configItem = new JsonObject ( ) ; for ( Entry < String , Map < String , String > > entry : ReporterConfigMetadata . getReporterMetaData ( ) . entrySet ( ) ) { Map < String , String > subMap = entry . getValue ( ) ; for ( Entry < String , String > subEntry : subMap . entrySet ( ) ) { JsonObject configSubItem = new JsonObject ( ) ; configSubItem . addProperty ( subEntry . getKey ( ) , subEntry . getValue ( ) ) ; configItem . add ( entry . getKey ( ) , configSubItem ) ; } } String json = gson . toJson ( configItem ) ; logger . exiting ( json ) ; return json ;
|
public class MPPResourceField { /** * Retrieve an instance of the ResourceField class based on the data read from an
* MS Project file .
* @ param value value from an MS Project file
* @ return ResourceField instance */
public static ResourceField getInstance ( int value ) { } }
|
ResourceField result = null ; if ( value >= 0 && value < FIELD_ARRAY . length ) { result = FIELD_ARRAY [ value ] ; } else { if ( ( value & 0x8000 ) != 0 ) { int baseValue = ResourceField . ENTERPRISE_CUSTOM_FIELD1 . getValue ( ) ; int id = baseValue + ( value & 0xFFF ) ; result = ResourceField . getInstance ( id ) ; } } return ( result ) ;
|
public class FieldMap { /** * Creates a field map for tasks .
* @ param props props data */
public void createTaskFieldMap ( Props props ) { } }
|
byte [ ] fieldMapData = null ; for ( Integer key : TASK_KEYS ) { fieldMapData = props . getByteArray ( key ) ; if ( fieldMapData != null ) { break ; } } if ( fieldMapData == null ) { populateDefaultData ( getDefaultTaskData ( ) ) ; } else { createFieldMap ( fieldMapData ) ; }
|
public class CommerceUserSegmentEntryPersistenceImpl { /** * Returns the first commerce user segment entry in the ordered set where groupId = & # 63 ; .
* @ param groupId the group ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching commerce user segment entry
* @ throws NoSuchUserSegmentEntryException if a matching commerce user segment entry could not be found */
@ Override public CommerceUserSegmentEntry findByGroupId_First ( long groupId , OrderByComparator < CommerceUserSegmentEntry > orderByComparator ) throws NoSuchUserSegmentEntryException { } }
|
CommerceUserSegmentEntry commerceUserSegmentEntry = fetchByGroupId_First ( groupId , orderByComparator ) ; if ( commerceUserSegmentEntry != null ) { return commerceUserSegmentEntry ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "groupId=" ) ; msg . append ( groupId ) ; msg . append ( "}" ) ; throw new NoSuchUserSegmentEntryException ( msg . toString ( ) ) ;
|
public class VoiceApi { /** * Perform a single - step transfer to the specified destination .
* @ param connId The connection ID of the call to transfer .
* @ param destination The number where the call should be transferred .
* @ param userData Key / value data to include with the call . ( optional ) */
public void singleStepTransfer ( String connId , String destination , KeyValueCollection userData ) throws WorkspaceApiException { } }
|
this . singleStepTransfer ( connId , destination , null , userData , null , null ) ;
|
public class JspTagUtils { /** * Gets the class name ( without package ) for the given class . */
private static String getClassName ( Class < ? > clazz ) { } }
|
String name = clazz . getSimpleName ( ) ; int dotPos = name . lastIndexOf ( '.' ) ; return dotPos == - 1 ? name : name . substring ( dotPos + 1 ) ;
|
public class ArrayViewList { public int indexOf ( Object o ) { } }
|
if ( o == null ) { for ( int i = start ; i < start + length ; i ++ ) if ( a [ i ] == null ) return i - start ; } else { for ( int i = start ; i < start + length ; i ++ ) if ( o . equals ( a [ i ] ) ) return i - start ; } return - 1 ;
|
public class SluggishGui { /** * overrides the visitor to reset look for gui interfaces
* @ param classContext
* the context object for the currently parsed class */
@ Override public void visitClassContext ( ClassContext classContext ) { } }
|
try { guiInterfaces = new HashSet < > ( ) ; JavaClass cls = classContext . getJavaClass ( ) ; JavaClass [ ] infs = cls . getAllInterfaces ( ) ; for ( JavaClass inf : infs ) { String name = inf . getClassName ( ) ; if ( ( name . startsWith ( "java.awt." ) || name . startsWith ( "javax.swing." ) ) && name . endsWith ( "Listener" ) ) { guiInterfaces . add ( inf ) ; } } if ( ! guiInterfaces . isEmpty ( ) ) { listenerCode = new LinkedHashMap < > ( ) ; expensiveThisCalls = new HashSet < > ( ) ; super . visitClassContext ( classContext ) ; } } catch ( ClassNotFoundException cnfe ) { bugReporter . reportMissingClass ( cnfe ) ; } finally { guiInterfaces = null ; listenerCode = null ; expensiveThisCalls = null ; }
|
public class A_CmsDirectEditButtons { /** * Adds the highlighting and option bar . < p > */
protected void addHighlightingAndBar ( ) { } }
|
timer = null ; highlightElement ( ) ; getElement ( ) . addClassName ( org . opencms . gwt . client . ui . css . I_CmsLayoutBundle . INSTANCE . stateCss ( ) . cmsHovering ( ) ) ;
|
public class ByteArrayBindTransform {

    /**
     * Generate parse on jackson internal.
     * Emits the statement that reads a byte[] property from a Jackson parser:
     * either Base64-decoded from a string value ({@code onString}) or read as
     * raw binary, optionally routed through the property's type adapter, and
     * guarded by a VALUE_NULL check when the property is nullable.
     *
     * @param context the context
     * @param methodBuilder the method builder
     * @param parserName the parser name
     * @param beanClass the bean class
     * @param beanName the bean name
     * @param property the property
     * @param onString true when the value is carried as a Base64 string
     */
    public void generateParseOnJacksonInternal(BindTypeContext context, Builder methodBuilder, String parserName, TypeName beanClass, String beanName, BindProperty property, boolean onString) {
        // Nullable properties are only read when the current token is not VALUE_NULL.
        if (property.isNullable()) {
            methodBuilder.beginControlFlow("if ($L.currentToken()!=$T.VALUE_NULL)", parserName, JsonToken.class);
        }
        if (property.hasTypeAdapter()) {
            // Wrap the raw value in the adapter's toJava conversion; the vararg
            // order must match the $T/$L placeholders in the format string.
            if (onString) {
                methodBuilder.addStatement(setter(beanClass, beanName, property, PRE_TYPE_ADAPTER_TO_JAVA + "$T.decode($L.getValueAsString())" + POST_TYPE_ADAPTER), TypeAdapterUtils.class, TypeUtility.typeName(property.typeAdapter.adapterClazz), Base64Utils.class, parserName);
            } else {
                methodBuilder.addStatement(setter(beanClass, beanName, property, PRE_TYPE_ADAPTER_TO_JAVA + "$L.getBinaryValue()" + POST_TYPE_ADAPTER), TypeAdapterUtils.class, TypeUtility.typeName(property.typeAdapter.adapterClazz), parserName);
            }
        } else {
            if (onString) {
                methodBuilder.addStatement(setter(beanClass, beanName, property, "$T.decode($L.getValueAsString())"), Base64Utils.class, parserName);
            } else {
                methodBuilder.addStatement(setter(beanClass, beanName, property, "$L.getBinaryValue()"), parserName);
            }
        }
        // Close the null-guard opened above.
        if (property.isNullable()) {
            methodBuilder.endControlFlow();
        }
    }
}
public class FeatureTransform { /** * Transform an array of feature vectors .
* @ param x an array of feature vectors . The feature
* vectors may be modified on output if copy is false .
* @ return the transformed feature vectors . */
public double [ ] [ ] transform ( double [ ] [ ] x ) { } }
|
double [ ] [ ] y = new double [ x . length ] [ ] ; for ( int i = 0 ; i < y . length ; i ++ ) { y [ i ] = transform ( x [ i ] ) ; } return y ;
|
public class Apptentive { /** * Must be called from the { @ link Application # onCreate ( ) } method in the { @ link Application } object defined in your app ' s manifest .
* Note : application key and signature would be resolved from the AndroidManifest . xml
* @ param application The { @ link Application } object for this app .
* @ deprecated Please , use { @ link # register ( Application , String , String ) } or { @ link # register ( Application , ApptentiveConfiguration ) } instead */
@ Deprecated public static void register ( Application application ) { } }
|
if ( application == null ) { throw new IllegalArgumentException ( "Application is null" ) ; } String apptentiveKey = Util . getManifestMetadataString ( application , Constants . MANIFEST_KEY_APPTENTIVE_KEY ) ; if ( StringUtils . isNullOrEmpty ( apptentiveKey ) ) { ApptentiveLog . e ( "Unable to initialize Apptentive SDK: '%s' manifest key is missing" , Constants . MANIFEST_KEY_APPTENTIVE_KEY ) ; return ; } String apptentiveSignature = Util . getManifestMetadataString ( application , Constants . MANIFEST_KEY_APPTENTIVE_SIGNATURE ) ; if ( StringUtils . isNullOrEmpty ( apptentiveSignature ) ) { ApptentiveLog . e ( "Unable to initialize Apptentive SDK: '%s' manifest key is missing" , Constants . MANIFEST_KEY_APPTENTIVE_SIGNATURE ) ; return ; } ApptentiveConfiguration configuration = new ApptentiveConfiguration ( apptentiveKey , apptentiveSignature ) ; String logLevelString = Util . getManifestMetadataString ( application , Constants . MANIFEST_KEY_APPTENTIVE_LOG_LEVEL ) ; ApptentiveLog . Level logLevel = ApptentiveLog . Level . parse ( logLevelString ) ; if ( logLevel != ApptentiveLog . Level . UNKNOWN ) { configuration . setLogLevel ( logLevel ) ; } register ( application , configuration ) ;
|
public class GVRAnimation { /** * Sets the offset for the animation .
* @ param startOffset animation will start at the specified offset value
* @ return { @ code this } , so you can chain setProperty ( ) calls .
* @ throws IllegalArgumentException
* If { @ code startOffset } is either negative or greater than
* the animation duration */
public GVRAnimation setOffset ( float startOffset ) { } }
|
if ( startOffset < 0 || startOffset > mDuration ) { throw new IllegalArgumentException ( "offset should not be either negative or greater than duration" ) ; } animationOffset = startOffset ; mDuration = mDuration - animationOffset ; return this ;
|
public class AWSCloud { /** * Puts the given key / value into the given map only if the value is not null .
* @ param parameters the map to add to
* @ param key the key of the value
* @ param value the value to add if not null */
public static void addValueIfNotNull ( @ Nonnull Map < String , String > parameters , @ Nonnull String key , Object value ) { } }
|
if ( value == null ) { return ; } parameters . put ( key , value . toString ( ) ) ;
|
public class ActionBarSherlock {

    /**
     * Wrap an activity with an action bar abstraction which will enable the
     * use of a custom implementation on platforms where a native version does
     * not exist. Candidate implementations are filtered first by TVDPI
     * qualifier, then by API level; exactly one must remain.
     *
     * @param activity Owning activity.
     * @param flags Option flags to control behavior.
     * @return Instance to interact with the action bar.
     * @throws IllegalStateException if zero or more than one registered
     *         implementation matches the device configuration
     */
    public static ActionBarSherlock wrap(Activity activity, int flags) {
        // Create a local implementation map we can modify
        HashMap<Implementation, Class<? extends ActionBarSherlock>> impls =
                new HashMap<Implementation, Class<? extends ActionBarSherlock>>(IMPLEMENTATIONS);
        boolean hasQualfier; // reused flag: "does any candidate declare this qualifier?"

        /* DPI FILTERING */
        hasQualfier = false;
        for (Implementation key : impls.keySet()) {
            // Only honor TVDPI as a specific qualifier
            if (key.dpi() == DisplayMetrics.DENSITY_TV) {
                hasQualfier = true;
                break;
            }
        }
        if (hasQualfier) {
            final boolean isTvDpi = activity.getResources().getDisplayMetrics().densityDpi == DisplayMetrics.DENSITY_TV;
            // Remove candidates whose TVDPI qualifier does not match the device.
            for (Iterator<Implementation> keys = impls.keySet().iterator(); keys.hasNext(); ) {
                int keyDpi = keys.next().dpi();
                if ((isTvDpi && keyDpi != DisplayMetrics.DENSITY_TV)
                        || (!isTvDpi && keyDpi == DisplayMetrics.DENSITY_TV)) {
                    keys.remove();
                }
            }
        }

        /* API FILTERING */
        hasQualfier = false;
        for (Implementation key : impls.keySet()) {
            if (key.api() != Implementation.DEFAULT_API) {
                hasQualfier = true;
                break;
            }
        }
        if (hasQualfier) {
            final int runtimeApi = Build.VERSION.SDK_INT;
            int bestApi = 0;
            // First pass: drop implementations requiring a newer API than the
            // device runs, remembering the highest API level still allowed.
            for (Iterator<Implementation> keys = impls.keySet().iterator(); keys.hasNext(); ) {
                int keyApi = keys.next().api();
                if (keyApi > runtimeApi) {
                    keys.remove();
                } else if (keyApi > bestApi) {
                    bestApi = keyApi;
                }
            }
            // Second pass: keep only the best (highest) remaining API level.
            for (Iterator<Implementation> keys = impls.keySet().iterator(); keys.hasNext(); ) {
                if (keys.next().api() != bestApi) {
                    keys.remove();
                }
            }
        }

        // After filtering, exactly one implementation must remain.
        if (impls.size() > 1) {
            throw new IllegalStateException("More than one implementation matches configuration.");
        }
        if (impls.isEmpty()) {
            throw new IllegalStateException("No implementations match configuration.");
        }
        Class<? extends ActionBarSherlock> impl = impls.values().iterator().next();
        if (DEBUG) Log.i(TAG, "Using implementation: " + impl.getSimpleName());

        // Instantiate via the expected constructor; any reflection failure is a
        // programming error, so surface it as unchecked.
        try {
            Constructor<? extends ActionBarSherlock> ctor = impl.getConstructor(CONSTRUCTOR_ARGS);
            return ctor.newInstance(activity, flags);
        } catch (NoSuchMethodException e) {
            throw new RuntimeException(e);
        } catch (IllegalArgumentException e) {
            throw new RuntimeException(e);
        } catch (InstantiationException e) {
            throw new RuntimeException(e);
        } catch (IllegalAccessException e) {
            throw new RuntimeException(e);
        } catch (InvocationTargetException e) {
            throw new RuntimeException(e);
        }
    }
}
public class MsgFuncGenerator {

    /**
     * Private helper to process and collect all variables used within this msg
     * node for code generation.
     *
     * @return A Map populated with all the variables used within this message
     *         node, using {@link MsgPlaceholderInitialNode#genBasePhName}.
     */
    private Map<PyExpr, PyExpr> collectVarNameListAndToPyExprMap() {
        Map<PyExpr, PyExpr> nodePyVarToPyExprMap = new LinkedHashMap<>();
        for (Map.Entry<String, MsgSubstUnitNode> entry : msgNode.getVarNameToRepNodeMap().entrySet()) {
            MsgSubstUnitNode substUnitNode = entry.getValue();
            PyExpr substPyExpr = null;
            if (substUnitNode instanceof MsgPlaceholderNode) {
                SoyNode phInitialNode = ((AbstractParentSoyNode<?>) substUnitNode).getChild(0);
                // Print/call/raw-text placeholders render directly to a Python string.
                if (phInitialNode instanceof PrintNode || phInitialNode instanceof CallNode || phInitialNode instanceof RawTextNode) {
                    substPyExpr = PyExprUtils.concatPyExprs(genPyExprsVisitor.exec(phInitialNode)).toPyString();
                }
                // when the placeholder is generated by HTML tags
                if (phInitialNode instanceof MsgHtmlTagNode) {
                    substPyExpr = PyExprUtils.concatPyExprs(genPyExprsVisitor.execOnChildren((ParentSoyNode<?>) phInitialNode)).toPyString();
                }
            } else if (substUnitNode instanceof MsgPluralNode) {
                // Translates {@link MsgPluralNode#pluralExpr} into a Python lookup expression.
                // Note that {@code pluralExpr} represents the soy expression of the {@code plural} attr,
                // i.e. the {@code $numDrafts} in {@code {plural $numDrafts}...{/plural}}.
                substPyExpr = translateToPyExprVisitor.exec(((MsgPluralNode) substUnitNode).getExpr());
            } else if (substUnitNode instanceof MsgSelectNode) {
                substPyExpr = translateToPyExprVisitor.exec(((MsgSelectNode) substUnitNode).getExpr());
            }
            if (substPyExpr != null) {
                // Key is the placeholder name quoted as a Python string literal.
                nodePyVarToPyExprMap.put(new PyStringExpr("'" + entry.getKey() + "'"), substPyExpr);
            }
        }
        return nodePyVarToPyExprMap;
    }
}
public class Encoding {

    /**
     * Adds a unit clause to the given SAT solver.
     * Uses the shared {@code this.clause} buffer, which must be empty on entry
     * and is cleared again before returning; the push/addClause/clear order is
     * significant.
     *
     * @param s the sat solver
     * @param a the unit literal
     * @param blocking the blocking literal ({@code LIT_UNDEF} to add none)
     */
    private void addUnitClause(final MiniSatStyleSolver s, int a, int blocking) {
        assert this.clause.size() == 0;
        assert a != LIT_UNDEF;
        assert var(a) < s.nVars();
        this.clause.push(a);
        if (blocking != LIT_UNDEF)
            this.clause.push(blocking);
        s.addClause(this.clause, null);
        this.clause.clear();
    }
}
public class ServletContextAccess { /** * / * ( non - Javadoc )
* @ see com . att . cadi . PropAccess # log ( java . lang . Exception , java . lang . Object [ ] ) */
@ Override public void log ( Exception e , Object ... elements ) { } }
|
StringBuilder sb = buildMsg ( Level . ERROR , elements ) ; context . log ( sb . toString ( ) , e ) ;
|
public class CmsExportPointDriver { /** * Deletes a file or a folder in the real file sytem . < p >
* If the given resource name points to a folder , then this folder is only deleted if it is empty .
* This is required since the same export point RFS target folder may be used by multiple export points .
* For example , this is usually the case with the < code > / WEB - INF / classes / < / code > and
* < code > / WEB - INF / lib / < / code > folders which are export point for multiple modules .
* If all resources in the RFS target folder where deleted , uninstalling one module would delete the
* export < code > classes < / code > and < code > lib < / code > resources of all other modules . < p >
* @ param resourceName the root path of the resource to be deleted
* @ param exportpoint the name of the export point */
public void deleteResource ( String resourceName , String exportpoint ) { } }
|
File file = getExportPointFile ( resourceName , exportpoint ) ; if ( file . exists ( ) && file . canWrite ( ) ) { // delete the file ( or folder )
file . delete ( ) ; // also delete empty parent directories
File parent = file . getParentFile ( ) ; if ( parent . canWrite ( ) ) { parent . delete ( ) ; } }
|
public class JSONArray { /** * Returns the value at { @ code index } , or null if the array has no value
* at { @ code index } . */
public Object opt ( int index ) { } }
|
if ( index < 0 || index >= values . size ( ) ) { return null ; } return values . get ( index ) ;
|
public class Boxing { /** * Returns any multidimensional array into an array of boxed values .
* @ param src source array
* @ return multidimensional array */
public static Object deepBox ( Object src ) { } }
|
Class < ? > resultType = arrayBoxingType ( src . getClass ( ) ) ; return deepBox ( resultType , src ) ;
|
public class ProtocolSerializer { /** * Read a message from the input byte stream and send it to the event handler .
* @ param messageBytes An array of bytes that contains the message to be deserialized .
* @ param observer An implementation of the MultiObserver interface which will be called
* to process the deserialized message . */
public void read ( final byte [ ] messageBytes , final MultiObserver observer ) { } }
|
try ( final InputStream inputStream = new ByteArrayInputStream ( messageBytes ) ) { // Binary decoder for both the header and the message .
final BinaryDecoder decoder = DecoderFactory . get ( ) . binaryDecoder ( inputStream , null ) ; // Read the header message .
final Header header = this . headerReader . read ( null , decoder ) ; final String classId = header . getClassName ( ) . toString ( ) ; LOG . log ( Level . FINEST , "Deserializing Avro message: {0}" , classId ) ; // Get the appropriate deserializer and deserialize the message .
final IMessageDeserializer deserializer = this . nameToDeserializerMap . get ( classId ) ; if ( deserializer != null ) { deserializer . deserialize ( decoder , observer , header . getSequence ( ) ) ; } else { throw new RuntimeException ( "Request to deserialize unknown message type: " + classId ) ; } } catch ( final IOException e ) { throw new RuntimeException ( "Failure reading message" , e ) ; } catch ( final InvocationTargetException | IllegalAccessException e ) { throw new RuntimeException ( "Error deserializing message body" , e ) ; }
|
public class PermittedRepository {

    /**
     * Find permitted entities by parameters.
     * Convenience overload delegating to the full variant with the third
     * argument passed as null.
     * NOTE(review): the meaning of that third parameter is defined by the full
     * overload, which is not visible in this chunk — confirm before relying on it.
     *
     * @param whereCondition the parameters condition
     * @param conditionParams the parameters map
     * @param pageable the page info
     * @param entityClass the entity class to get
     * @param privilegeKey the privilege key for permission lookup
     * @param <T> the type of entity
     * @return page of permitted entities
     */
    public <T> Page<T> findByCondition(String whereCondition, Map<String, Object> conditionParams, Pageable pageable, Class<T> entityClass, String privilegeKey) {
        return findByCondition(whereCondition, conditionParams, null, pageable, entityClass, privilegeKey);
    }
}
public class RouterExample {

    // tag::negotiationBasedOnAccepts[]
    /**
     * Accepts an XML request body on {@code /consume} and echoes the bound
     * form back serialized as XML.
     *
     * @param form the request body bound from XML
     * @return a 200 OK result with an XML payload
     */
    @Route(method = HttpMethod.POST, uri = "/consume", accepts = "application/xml")
    public Result fromXML(@Body Data form) {
        return ok(form).xml();
    }
}
public class DocumentModelResources { /** * Load the external resources such as gazetters and clustering lexicons .
* @ param params
* the training parameters
* @ return the map contanining and id and the resource
* @ throws IOException
* if io error */
public static Map < String , Object > loadParseResources ( final TrainingParameters params ) throws IOException { } }
|
final Map < String , Object > resources = new HashMap < String , Object > ( ) ; @ SuppressWarnings ( "rawtypes" ) final Map < String , ArtifactSerializer > artifactSerializers = SequenceLabelerModel . createArtifactSerializers ( ) ; if ( Flags . isBrownFeatures ( params ) ) { final String ClusterLexiconPath = Flags . getBrownFeatures ( params ) ; final String serializerId = "brownserializer" ; final List < File > ClusterLexiconFiles = Flags . getClusterLexiconFiles ( ClusterLexiconPath ) ; for ( final File ClusterLexiconFile : ClusterLexiconFiles ) { final String brownFilePath = ClusterLexiconFile . getCanonicalPath ( ) ; artifactSerializers . put ( serializerId , new WordCluster . WordClusterSerializer ( ) ) ; loadResource ( serializerId , artifactSerializers , brownFilePath , resources ) ; } } if ( Flags . isClarkFeatures ( params ) ) { final String clarkClusterPath = Flags . getClarkFeatures ( params ) ; final String serializerId = "clarkserializer" ; final List < File > clarkClusterFiles = Flags . getClusterLexiconFiles ( clarkClusterPath ) ; for ( final File clarkClusterFile : clarkClusterFiles ) { final String clarkFilePath = clarkClusterFile . getCanonicalPath ( ) ; artifactSerializers . put ( serializerId , new WordCluster . WordClusterSerializer ( ) ) ; loadResource ( serializerId , artifactSerializers , clarkFilePath , resources ) ; } } if ( Flags . isWord2VecClusterFeatures ( params ) ) { final String ClusterLexiconPath = Flags . getWord2VecClusterFeatures ( params ) ; final String serializerId = "word2vecserializer" ; final List < File > ClusterLexiconFiles = Flags . getClusterLexiconFiles ( ClusterLexiconPath ) ; for ( final File ClusterLexiconFile : ClusterLexiconFiles ) { final String word2vecFilePath = ClusterLexiconFile . getCanonicalPath ( ) ; artifactSerializers . put ( serializerId , new WordCluster . WordClusterSerializer ( ) ) ; loadResource ( serializerId , artifactSerializers , word2vecFilePath , resources ) ; } } if ( Flags . 
isPOSTagModelFeatures ( params ) ) { final String morphoResourcesPath = Flags . getPOSTagModelFeatures ( params ) ; final String posSerializerId = "seqmodelserializer" ; artifactSerializers . put ( posSerializerId , new SequenceModelResource . SequenceModelResourceSerializer ( ) ) ; loadResource ( posSerializerId , artifactSerializers , morphoResourcesPath , resources ) ; } if ( Flags . isLemmaModelFeatures ( params ) ) { final String lemmaModelPath = Flags . getLemmaModelFeatures ( params ) ; final String lemmaSerializerId = "seqmodelserializer" ; artifactSerializers . put ( lemmaSerializerId , new SequenceModelResource . SequenceModelResourceSerializer ( ) ) ; loadResource ( lemmaSerializerId , artifactSerializers , lemmaModelPath , resources ) ; } if ( Flags . isLemmaDictionaryFeatures ( params ) ) { final String lemmaDictPath = Flags . getLemmaDictionaryFeatures ( params ) ; final String [ ] lemmaDictResources = Flags . getLemmaDictionaryResources ( lemmaDictPath ) ; final String posSerializerId = "seqmodelserializer" ; final String lemmaDictSerializerId = "lemmadictserializer" ; artifactSerializers . put ( posSerializerId , new SequenceModelResource . SequenceModelResourceSerializer ( ) ) ; loadResource ( posSerializerId , artifactSerializers , lemmaDictResources [ 0 ] , resources ) ; artifactSerializers . put ( lemmaDictSerializerId , new DictionaryLemmatizer . DictionaryLemmatizerSerializer ( ) ) ; loadResource ( lemmaDictSerializerId , artifactSerializers , lemmaDictResources [ 1 ] , resources ) ; } return resources ;
|
public class CommandGroup {

    /**
     * Create a button bar with buttons for all the commands in this group.
     * Adds a border of two spaces top and bottom.
     * Delegates to the two-argument overload with the standard border.
     *
     * @param minimumButtonSize if null, then there is no minimum size
     * @return never null
     */
    public JComponent createButtonBar(Size minimumButtonSize) {
        return createButtonBar(minimumButtonSize, GuiStandardUtils.createTopAndBottomBorder(UIConstants.TWO_SPACES));
    }
}
public class WhiteboxImpl { /** * Find the method whose parameter types most closely matches the { @ code types } .
* @ param firstMethodCandidate The first method candidate
* @ param secondMethodCandidate The second method candidate
* @ return The method that most closely matches the provided types or { @ code null } if no method match . */
private static Method getMethodWithMostSpecificParameterTypes ( Method firstMethodCandidate , Method secondMethodCandidate ) { } }
|
Class < ? > [ ] firstMethodCandidateParameterTypes = firstMethodCandidate . getParameterTypes ( ) ; Class < ? > [ ] secondMethodCandidateParameterTypes = secondMethodCandidate . getParameterTypes ( ) ; Method bestMatch = null ; for ( int i = 0 ; i < firstMethodCandidateParameterTypes . length ; i ++ ) { Class < ? > candidateType1 = toBoxedIfPrimitive ( firstMethodCandidateParameterTypes [ i ] ) ; Class < ? > candidateType2 = toBoxedIfPrimitive ( secondMethodCandidateParameterTypes [ i ] ) ; if ( ! candidateType1 . equals ( candidateType2 ) ) { Method potentialMatch = null ; if ( candidateType1 . isAssignableFrom ( candidateType2 ) ) { potentialMatch = secondMethodCandidate ; } else if ( candidateType2 . isAssignableFrom ( candidateType1 ) ) { potentialMatch = firstMethodCandidate ; } if ( potentialMatch != null ) { if ( bestMatch != null && ! potentialMatch . equals ( bestMatch ) ) { /* * We cannot determine which method is the most specific because one parameter of the first candidate
* was more specific and another parameter of the second candidate was more specific . */
return null ; } else { bestMatch = potentialMatch ; } } } } return bestMatch ;
|
public class ProtocolVersionImpl { /** * ( non - Javadoc )
* @ see org . restcomm . protocols . ss7 . tcap . asn . Encodable # decode ( org . mobicents . protocols . asn . AsnInputStream ) */
public void decode ( AsnInputStream ais ) throws ParseException { } }
|
try { BitSetStrictLength readV = ais . readBitString ( ) ; if ( readV . getStrictLength ( ) >= 1 && readV . get ( 0 ) ) { // ok
} else { this . supportedVersion = false ; } } catch ( IOException e ) { throw new ParseException ( PAbortCauseType . BadlyFormattedTxPortion , null , "IOException while decoding ProtocolVersion: " + e . getMessage ( ) , e ) ; } catch ( AsnException e ) { throw new ParseException ( PAbortCauseType . BadlyFormattedTxPortion , null , "AsnException while decoding ProtocolVersion: " + e . getMessage ( ) , e ) ; }
|
public class Ifc2x3tc1PackageImpl {

    /**
     * Lazily resolves and caches the IfcAheadOrBehind {@link EEnum} from the
     * globally registered package (classifier index 774 is fixed by the
     * generated model — do not edit by hand).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getIfcAheadOrBehind() {
        if (ifcAheadOrBehindEEnum == null) {
            ifcAheadOrBehindEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(774);
        }
        return ifcAheadOrBehindEEnum;
    }
}
public class MessageListenerExample {

    /**
     * NOTE THE @Override!
     * This method is overriding a hook method in the
     * {@link net.dv8tion.jda.core.hooks.ListenerAdapter ListenerAdapter} class, which has
     * convenience methods for all JDA events. Always annotate overriding methods with
     * {@code @Override} to let the compiler verify the override.
     * In this example, received messages are printed to the console and a few toy
     * commands ({@code !ping}, {@code !roll}, {@code !kick}, {@code !block}) are handled.
     *
     * @param event
     *            An event containing information about a
     *            {@link net.dv8tion.jda.core.entities.Message Message} that was sent in a channel.
     */
    @Override
    public void onMessageReceived(MessageReceivedEvent event) {
        // These are provided with every event in JDA
        JDA jda = event.getJDA(); // JDA, the core of the api.
        long responseNumber = event.getResponseNumber(); // The amount of discord events that JDA has received since the last reconnect.

        // Event specific information
        User author = event.getAuthor(); // The user that sent the message
        Message message = event.getMessage(); // The message that was received.
        MessageChannel channel = event.getChannel(); // The MessageChannel the message was sent to.
                                                     // This could be a TextChannel, PrivateChannel, or Group!
        String msg = message.getContentDisplay(); // Human readable version of the Message, similar to the client view.
        boolean bot = author.isBot(); // Useful to determine if the sending User is a BOT or not!

        if (event.isFromType(ChannelType.TEXT)) // If this message was sent to a Guild TextChannel
        {
            // Guild-specific accessors are only safe after the ChannelType check;
            // outside a Guild they might return null.
            Guild guild = event.getGuild(); // The Guild this message was sent in (in the API, Guilds are Servers).
            TextChannel textChannel = event.getTextChannel(); // The TextChannel that this message was sent to.
            Member member = event.getMember(); // The Member that sent the message; holds Guild-specific info about the User.
            String name;
            if (message.isWebhookMessage()) {
                name = author.getName(); // Webhook messages have no Member, so fall back to the author name.
            } else {
                name = member.getEffectiveName(); // Nickname if set, otherwise the username.
            }
            System.out.printf("(%s)[%s]<%s>: %s\n", guild.getName(), textChannel.getName(), name, msg);
        } else if (event.isFromType(ChannelType.PRIVATE)) // If this message was sent to a PrivateChannel
        {
            // The privateChannel is not used directly here, but it is available.
            PrivateChannel privateChannel = event.getPrivateChannel();
            System.out.printf("[PRIV]<%s>: %s\n", author.getName(), msg);
        } else if (event.isFromType(ChannelType.GROUP)) // If this message was sent to a Group (CLIENT only!)
        {
            Group group = event.getGroup();
            String groupName = group.getName() != null ? group.getName() : ""; // A group name can be null (unnamed group).
            System.out.printf("[GRP: %s]<%s>: %s\n", groupName, author.getName(), msg);
        }

        // Extremely simplified command processing. Note these compare
        // message.getContentDisplay() (a String) — never compare a Message to a String.
        if (msg.equals("!ping")) {
            // queue() sends the Request asynchronously and handles rate limits automatically.
            channel.sendMessage("pong!").queue();
        } else if (msg.equals("!roll")) {
            // Demonstrates the success consumer of a RestAction: it receives the
            // resulting object (here the sent Message) once the action completes.
            Random rand = new Random();
            int roll = rand.nextInt(6) + 1; // This results in 1 - 6 (instead of 0 - 5)
            channel.sendMessage("Your roll: " + roll).queue(sentMessage -> // lambda: called with the sent Message
            {
                if (roll < 3) {
                    channel.sendMessage("The roll for messageId: " + sentMessage.getId() + " wasn't very good... Must be bad luck!\n").queue();
                }
            });
        } else if (msg.startsWith("!kick")) // Note: startsWith, not equals.
        {
            // Admin command requiring Permission.KICK_MEMBERS. We check up front,
            // but also handle a permission failure via the error consumer (second
            // queue argument) in case things change between check and execution.
            // Only messages sent in a Guild are handled.
            if (message.isFromType(ChannelType.TEXT)) {
                // If no users are provided, we can't kick anyone!
                if (message.getMentionedUsers().isEmpty()) {
                    channel.sendMessage("You must mention 1 or more Users to be kicked!").queue();
                } else {
                    Guild guild = event.getGuild();
                    Member selfMember = guild.getSelfMember(); // The currently logged in account's Member object.
                    // Without KICK_MEMBERS we cannot kick — bail out early.
                    if (!selfMember.hasPermission(Permission.KICK_MEMBERS)) {
                        channel.sendMessage("Sorry! I don't have permission to kick members in this Guild!").queue();
                        return; // Jump out of the method instead of cascading if/else.
                    }
                    // Loop over all mentioned users, kicking them one at a time.
                    List<User> mentionedUsers = message.getMentionedUsers();
                    for (User user : mentionedUsers) {
                        Member member = guild.getMember(user); // Member object for each mentioned user.
                        // We can only interact with Members lower in the Role hierarchy.
                        if (!selfMember.canInteract(member)) {
                            // MessageAction supports StringBuilder-style append calls.
                            channel.sendMessage("Cannot kick member: ").append(member.getEffectiveName()).append(", they are higher in the hierarchy than I am!").queue();
                            continue; // Continue to the next mentioned user.
                        }
                        // queue() handles rate limits for us.
                        guild.getController().kick(member).queue(
                                success -> channel.sendMessage("Kicked ").append(member.getEffectiveName()).append("! Cya!").queue(),
                                error -> { // The failure consumer provides a Throwable; check for PermissionException.
                                    if (error instanceof PermissionException) {
                                        PermissionException pe = (PermissionException) error;
                                        Permission missingPermission = pe.getPermission(); // The exact missing permission
                                        // (note: some PermissionExceptions carry only a message, no permission).
                                        channel.sendMessage("PermissionError kicking [").append(member.getEffectiveName()).append("]: ").append(error.getMessage()).queue();
                                    } else {
                                        channel.sendMessage("Unknown error while kicking [").append(member.getEffectiveName()).append("]: <").append(error.getClass().getSimpleName()).append(">: ").append(error.getMessage()).queue();
                                    }
                                });
                    }
                }
            } else {
                channel.sendMessage("This is a Guild-Only command!").queue();
            }
        } else if (msg.equals("!block")) {
            // Demonstrates RestAction.complete(): blocks until the request finished
            // and returns the result object in the current execution context.
            // complete() still handles rate limits; complete(false) instead throws
            // a RateLimitException when a rate limit is hit. Prefer queue() normally.
            try {
                // complete() queues the Message and returns once it was sent.
                Message sentMessage = channel.sendMessage("I blocked and will return the message!").complete();
                // complete(false): caller handles rate limits; throws RateLimitedException on breach.
                Message sentRatelimitMessage = channel.sendMessage("I expect rate limitation and know how to handle it!").complete(false);
                System.out.println("Sent a message using blocking! Luckly I didn't get Ratelimited... MessageId: " + sentMessage.getId());
            } catch (RateLimitedException e) {
                System.out.println("Whoops! Got ratelimited when attempting to use a .complete() on a RestAction! RetryAfter: " + e.getRetryAfter());
            }
            // RateLimitedException is the only checked exception thrown by complete().
            catch (RuntimeException e) {
                System.out.println("Unfortunately something went wrong when we tried to send the Message and .complete() threw an Exception.");
                e.printStackTrace();
            }
        }
    }
}
public class SelectStatement { /** * Get aggregation select items .
* @ return aggregation select items */
public List < AggregationSelectItem > getAggregationSelectItems ( ) { } }
|
List < AggregationSelectItem > result = new LinkedList < > ( ) ; for ( SelectItem each : items ) { if ( each instanceof AggregationSelectItem ) { AggregationSelectItem aggregationSelectItem = ( AggregationSelectItem ) each ; result . add ( aggregationSelectItem ) ; result . addAll ( aggregationSelectItem . getDerivedAggregationSelectItems ( ) ) ; } } return result ;
|
public class GeneratedDContactDaoImpl { /** * query - by method for field tags
* @ param tags the specified attribute
* @ return an Iterable of DContacts for the specified tags */
public Iterable < DContact > queryByTags ( Object parent , java . lang . Object tags ) { } }
|
return queryByField ( parent , DContactMapper . Field . TAGS . getFieldName ( ) , tags ) ;
|
public class CmsResourceUtil { /** * Returns the lock icon path for the given resource . < p >
* Relative to < code > / system / workplace / resources / < / code > . < p >
* Returns < code > explorer / project _ none . gif < / code > if request context is < code > null < / code > . < p >
* @ return the lock icon path for the given resource */
public String getIconPathLock ( ) { } }
|
CmsLock lock = getLock ( ) ; String iconPath = null ; if ( ! lock . isUnlocked ( ) && ( m_request != null ) && isInsideProject ( ) ) { if ( getLock ( ) . isOwnedBy ( m_request . getCurrentUser ( ) ) && ( getLockedInProjectId ( ) . equals ( getReferenceProject ( ) . getUuid ( ) ) ) ) { if ( lock . isShared ( ) ) { iconPath = "shared" ; } else { iconPath = "user" ; } } else { iconPath = "other" ; } } if ( iconPath == null ) { iconPath = "project_none" ; } else { iconPath = "lock_" + iconPath ; } return "explorer/" + iconPath + ".gif" ;
|
public class BigQueryDataMarshallerByType { /** * Converts the input object into a nested map of field name to field value . Recursively breaks
* down complex field types to simple Bigquery types as listed in { @ link BigQueryDataTypeUtil } .
* Uses reflection to infer the type and value of fields . Only accessible fields are marshalled .
* Fields annotated with { @ link BigQueryIgnore } are ignored .
* @ param field { @ link Field } to map to list of repeated values
* @ param fieldValue value of the field to resolve
* @ return a nested map of field name to field value . */
@ SuppressWarnings ( { } }
|
"rawtypes" , "unchecked" } ) List < Object > mapRepeatedFieldToListOfValues ( Field field , Object fieldValue ) { Class < ? > fieldType = getParameterTypeOfRepeatedField ( field ) ; Collection fieldToMap = null ; if ( isCollection ( field . getType ( ) ) ) { fieldToMap = ( Collection ) fieldValue ; } else if ( field . getType ( ) . isArray ( ) ) { fieldToMap = Arrays . asList ( ( Object [ ] ) fieldValue ) ; } List < Object > toRet = Lists . newArrayListWithCapacity ( fieldToMap . size ( ) ) ; if ( isSimpleBigQueryType ( fieldType ) ) { for ( Object o : fieldToMap ) { toRet . add ( o ) ; } } else { for ( Object o : fieldToMap ) { toRet . add ( mapFieldNameToValue ( o ) ) ; } } return toRet ;
|
public class Transfer { /** * Method declaration */
private void initGUI ( ) { } }
|
Font fFont = new Font ( "Dialog" , Font . PLAIN , 12 ) ; setLayout ( new BorderLayout ( ) ) ; Panel p = new Panel ( ) ; p . setBackground ( SystemColor . control ) ; p . setLayout ( new GridLayout ( 16 , 1 ) ) ; tSourceTable = new TextField ( ) ; tSourceTable . setEnabled ( false ) ; tDestTable = new TextField ( ) ; tDestTable . addActionListener ( this ) ; tDestDrop = new TextField ( ) ; tDestDrop . addActionListener ( this ) ; tDestCreate = new TextField ( ) ; tDestCreate . addActionListener ( this ) ; tDestDelete = new TextField ( ) ; tDestDelete . addActionListener ( this ) ; tDestCreateIndex = new TextField ( ) ; tDestCreateIndex . addActionListener ( this ) ; tDestDropIndex = new TextField ( ) ; tDestDropIndex . addActionListener ( this ) ; tSourceSelect = new TextField ( ) ; tSourceSelect . addActionListener ( this ) ; tDestInsert = new TextField ( ) ; tDestInsert . addActionListener ( this ) ; tDestAlter = new TextField ( ) ; tDestAlter . addActionListener ( this ) ; cTransfer = new Checkbox ( "Transfer to destination table" , true ) ; cTransfer . addItemListener ( this ) ; cDrop = new Checkbox ( "Drop destination table (ignore error)" , true ) ; cDrop . addItemListener ( this ) ; cCreate = new Checkbox ( "Create destination table" , true ) ; cCreate . addItemListener ( this ) ; cDropIndex = new Checkbox ( "Drop destination index (ignore error)" , true ) ; cDropIndex . addItemListener ( this ) ; cIdxForced = new Checkbox ( "force Idx_ prefix for indexes names" , false ) ; cIdxForced . addItemListener ( this ) ; cCreateIndex = new Checkbox ( "Create destination index" , true ) ; cCreateIndex . addItemListener ( this ) ; cDelete = new Checkbox ( "Delete rows in destination table" , true ) ; cDelete . addItemListener ( this ) ; cInsert = new Checkbox ( "Insert into destination" , true ) ; cInsert . addItemListener ( this ) ; cFKForced = new Checkbox ( "force FK_ prefix for foreign key names" , false ) ; cFKForced . 
addItemListener ( this ) ; cAlter = new Checkbox ( "Alter destination table" , true ) ; cAlter . addItemListener ( this ) ; p . add ( createLabel ( "Source table" ) ) ; p . add ( tSourceTable ) ; p . add ( cTransfer ) ; p . add ( tDestTable ) ; p . add ( cDrop ) ; p . add ( tDestDrop ) ; p . add ( cCreate ) ; p . add ( tDestCreate ) ; p . add ( cDropIndex ) ; p . add ( tDestDropIndex ) ; p . add ( cCreateIndex ) ; p . add ( tDestCreateIndex ) ; p . add ( cDelete ) ; p . add ( tDestDelete ) ; p . add ( cAlter ) ; p . add ( tDestAlter ) ; p . add ( createLabel ( "Select source records" ) ) ; p . add ( tSourceSelect ) ; p . add ( cInsert ) ; p . add ( tDestInsert ) ; p . add ( createLabel ( "" ) ) ; p . add ( createLabel ( "" ) ) ; p . add ( cIdxForced ) ; p . add ( cFKForced ) ; p . add ( createLabel ( "" ) ) ; p . add ( createLabel ( "" ) ) ; if ( iTransferMode == TRFM_TRANSFER ) { bStart = new Button ( "Start Transfer" ) ; bContinue = new Button ( "Continue Transfer" ) ; bContinue . setEnabled ( false ) ; } else if ( iTransferMode == Transfer . TRFM_DUMP ) { bStart = new Button ( "Start Dump" ) ; } else if ( iTransferMode == Transfer . TRFM_RESTORE ) { bStart = new Button ( "Start Restore" ) ; } bStart . addActionListener ( this ) ; p . add ( bStart ) ; if ( iTransferMode == TRFM_TRANSFER ) { bContinue . addActionListener ( this ) ; p . add ( bContinue ) ; } bStart . setEnabled ( false ) ; fMain . add ( "Center" , createBorderPanel ( p ) ) ; lTable = new java . awt . List ( 10 ) ; lTable . addItemListener ( this ) ; fMain . add ( "West" , createBorderPanel ( lTable ) ) ; tMessage = new TextField ( ) ; Panel pMessage = createBorderPanel ( tMessage ) ; fMain . add ( "South" , pMessage ) ;
|
public class SQLRecoverableUnitImpl { /** * Forces to the underlying recovery log information from the recoverable unit
* sections . The amount of information written depends on the input argument
* ' rewriteRequired ' . If this flag is false then only information that has not
* not previously been written will be passed to the underlying recover log .
* If this flag is true then all information will be passed to the underlying
* recovery log . Either way , the the underlying recovery log contains an up
* to date copy of the information retained in the target .
* The information is written to the underlying recovery log and forced
* through to persisent storage . After this call , the information is
* guaranteed to be retrieved during any post - failure recovery processing .
* This call my be used as part of an optomization when several recoverable units
* need to be pushed to disk . For example , the following sequence will ensure that
* recoverable units 1 through 4 are all persisted to physical storage : -
* < ul >
* < li > RecoverableUnit1 . writeSections ( . . ) < / li >
* < li > RecoverableUnit2 . writeSections ( . . ) < / li >
* < li > RecoverableUnit3 . writeSections ( . . ) < / li >
* < li > RecoverableUnit4 . forceSections ( . . ) < / li >
* < / ul >
* This internal version of the method is not exposed on the interfaces and can only
* be called from within the RLS . Client services invoke the simpler version of the
* method ( with no arguments ) which deligates down to this method .
* @ param rewriteRequired Boolean flag indicating if a rewrite is required .
* @ exception InternalLogException An unexpected error has occured . */
void forceSections ( boolean rewriteRequired ) throws InternalLogException { } }
|
if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "forceSections" , new java . lang . Object [ ] { this , new Boolean ( rewriteRequired ) } ) ; // If the parent recovery log instance has experienced a serious internal error then prevent
// this operation from executing .
if ( _recLog . failed ( ) ) { if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "forceSections" , this ) ; throw new InternalLogException ( null ) ; } try { writeSections ( rewriteRequired ) ; _recLog . forceSections ( ) ; } catch ( InternalLogException exc ) { FFDCFilter . processException ( exc , "com.ibm.ws.recoverylog.spi.SQLRecoverableUnitImpl.forceSections" , "531" , this ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "forceSections" , exc ) ; throw exc ; } catch ( Throwable exc ) { FFDCFilter . processException ( exc , "com.ibm.ws.recoverylog.spi.SQLRecoverableUnitImpl.forceSections" , "537" , this ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "forceSections" , "InternalLogException" ) ; throw new InternalLogException ( exc ) ; } if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "forceSections" ) ;
|
public class TypeBindingProbe { /** * 确定表达式的类型 */
protected void infer ( ) { } }
|
InferContext ctx = new InferContext ( ) ; ctx . types = types ; ctx . gt = this . program . gt ; for ( Statement st : this . program . metaData . statements ) { st . infer ( ctx ) ; }
|
public class XML {

    /**
     * Recursively collects every node in the subtree rooted at {@code node}
     * whose node name equals {@code elementName} (the root itself included).
     * Text and comment nodes are neither matched nor descended into.
     *
     * @param elementName the node name to search for
     * @param node        the root of the subtree to search; may be {@code null}
     * @return the matching nodes, or {@code null} when nothing matched or when
     *         {@code node} is {@code null}, a text node, or a comment node
     */
    public static Collection<Node> findElementsByName(String elementName, Node node) {
        if (node == null) {
            return null;
        }
        short type = node.getNodeType();
        if (type == Node.COMMENT_NODE || type == Node.TEXT_NODE) {
            return null;
        }
        Set<Node> matches = new HashSet<Node>();
        if (elementName.equals(node.getNodeName())) {
            matches.add(node);
        }
        NodeList children = node.getChildNodes();
        if (children == null) {
            return null;
        }
        for (int i = 0, n = children.getLength(); i < n; i++) {
            Node child = children.item(i);
            if (child.getNodeType() == Node.TEXT_NODE) {
                continue;
            }
            if (elementName.equals(child.getNodeName())) {
                matches.add(child);
            }
            // Recurse from each grandchild; the recursion covers the rest of
            // the subtree below this child.
            NodeList grandChildren = child.getChildNodes();
            if (grandChildren != null) {
                int count = grandChildren.getLength();
                for (int j = 0; j < count; j++) {
                    Collection<Node> deeper = findElementsByName(elementName, grandChildren.item(j));
                    if (deeper != null) {
                        matches.addAll(deeper);
                    }
                }
            }
        }
        return matches.isEmpty() ? null : matches;
    }
}
|
public class EncryptionAtRestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( EncryptionAtRest encryptionAtRest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( encryptionAtRest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( encryptionAtRest . getCatalogEncryptionMode ( ) , CATALOGENCRYPTIONMODE_BINDING ) ; protocolMarshaller . marshall ( encryptionAtRest . getSseAwsKmsKeyId ( ) , SSEAWSKMSKEYID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class Bits { /** * Exposes a < code > SortedSet < / code > of < code > Integer < / code > as
* { @ link BitStore } . The < code > start < / code > and < code > finish < / code >
* parameters must form a valid sub - range of the set . Since it is not
* possible to determine whether a set is modifiable , the mutability of the
* generated { @ link BitStore } must be specified as a call parameter .
* Creating a mutable { @ link BitStore } over an unmodifiable set may result
* in unspecified errors on any attempt to mutate the bit store .
* @ param set
* a sorted set of integers
* @ param start
* the least integer exposed by the { @ link BitStore }
* @ param finish
* the least integer greater than or equal to < code > start < / code >
* that is not exposed by the { @ link BitStore }
* @ param mutable
* whether the returned { @ link BitStore } is mutable
* @ throws IllegalArgumentException
* if the range start - to - finish does not form a valid sub - range
* of the supplied set .
* @ return a { @ link BitStore } view over the set */
public static BitStore asStore ( SortedSet < Integer > set , int start , int finish , boolean mutable ) { } }
|
if ( set == null ) throw new IllegalArgumentException ( "null set" ) ; if ( start < 0L ) throw new IllegalArgumentException ( "negative start" ) ; if ( finish < start ) throw new IllegalArgumentException ( "start exceeds finish" ) ; set = set . subSet ( start , finish ) ; return new IntSetBitStore ( set , start , finish , mutable ) ;
|
public class ReflectUtil { /** * Return the name of the package from which the given type can be used .
* < p > Returns a package from which all the type names contained in the given
* type literal are visible . Throws { @ link IllegalArgumentException } if there
* is no such package . If there are multiple such packages , then the type
* name can be used from any package ; the package containing the outermost
* class is used arbitrarily .
* < p > This method is intentionally not overloaded on Class , because it ' s
* normally an error to use a raw Class token to determine the package in
* which to manipulate a type . */
public static String getUserPackageName ( TypeLiteral < ? > typeLiteral ) { } }
|
Map < String , Class < ? > > packageNames = new LinkedHashMap < String , Class < ? > > ( ) ; getTypePackageNames ( typeLiteral . getType ( ) , packageNames ) ; if ( packageNames . size ( ) == 0 ) { // All type names are public , so typeLiteral is visible from any package .
// Arbitrarily put it in the package declaring the top - level class .
return typeLiteral . getRawType ( ) . getPackage ( ) . getName ( ) ; } else if ( packageNames . size ( ) == 1 ) { // The type contains names that are private to exactly one package ; it
// must be referenced from that package .
return packageNames . keySet ( ) . iterator ( ) . next ( ) ; } else { // The type literal contains types that are private to two or more
// different packages . This can happen if a class uses a type that is
// protected in its parent , and its parent is from another package . For
// instance :
// package pkg1:
// public class Parent {
// protected static class ForSubclasses {
// Here the type ForSubclasses is accessible to anything in the package
// " pkg1 " , but it can ' t be used in another package :
// package pkg2:
// class Foo < T > {
// class Child extends Parent {
// @ Inject Child ( Foo < ForSubclasses > ) { }
// There ' s no package in which we can place code that can create
// Foo < ForSubclasses > , even though the user was able to write that type ,
// because we would have to subclass Parent to do so . ( theoretically we
// could write static helper methods inside a subclass , but that seems
// like too much trouble to support this sort of weirdness )
StringBuilder packageNamesListBuilder = new StringBuilder ( ) ; for ( Class < ? > entry : packageNames . values ( ) ) { packageNamesListBuilder . append ( entry . getCanonicalName ( ) ) . append ( "\n" ) ; } throw new IllegalArgumentException ( PrettyPrinter . format ( "Unable to inject an instance of %s because it references protected classes" + " from multiple packages:\n%s" , typeLiteral , packageNamesListBuilder ) ) ; }
|
public class Sanitizers { /** * Converts plain text to HTML by entity escaping , stripping tags in sanitized content so the
* result can safely be embedded in an unquoted HTML attribute value . */
public static String escapeHtmlAttributeNospace ( SoyValue value ) { } }
|
value = normalizeNull ( value ) ; if ( isSanitizedContentOfKind ( value , SanitizedContent . ContentKind . HTML ) ) { // | escapeHtmlAttributeNospace should only be used on attribute values that cannot have tags .
return stripHtmlTags ( value . coerceToString ( ) , null , false ) ; } return escapeHtmlAttributeNospace ( value . coerceToString ( ) ) ;
|
public class AssetHolderDelegate { private boolean validAsset ( OrchidPage asset , String targetExtension ) { } }
|
return asset . getReference ( ) . getOutputExtension ( ) . equalsIgnoreCase ( targetExtension ) ;
|
public class Database { public void put_class_attribute_property ( String classname , DbAttribute attr ) throws DevFailed { } }
|
databaseDAO . put_class_attribute_property ( this , classname , attr ) ;
|
public class InternalSimpleAntlrParser { /** * InternalSimpleAntlr . g : 1277:1 : ruleParenthesized returns [ EObject current = null ] : ( this _ OPEN _ 0 = RULE _ OPEN this _ Alternatives _ 1 = ruleAlternatives otherlv _ 2 = ' ) ' ) ; */
public final EObject ruleParenthesized ( ) throws RecognitionException { } }
|
// ANTLR-generated rule body: matches an OPEN token, an Alternatives subtree, then ')'.
// Do not hand-edit; regenerate from InternalSimpleAntlr.g instead. Node-model
// bookkeeping (newLeafNode/newCompositeNode) is skipped while the parser is
// backtracking (state.backtracking != 0), and each match bails out early on failure.
EObject current = null ; Token this_OPEN_0 = null ; Token otherlv_2 = null ; EObject this_Alternatives_1 = null ; enterRule ( ) ; try { // InternalSimpleAntlr . g : 1280:28 : ( ( this _ OPEN _ 0 = RULE _ OPEN this _ Alternatives _ 1 = ruleAlternatives otherlv _ 2 = ' ) ' ) )
// InternalSimpleAntlr . g : 1281:1 : ( this _ OPEN _ 0 = RULE _ OPEN this _ Alternatives _ 1 = ruleAlternatives otherlv _ 2 = ' ) ' )
{ // InternalSimpleAntlr . g : 1281:1 : ( this _ OPEN _ 0 = RULE _ OPEN this _ Alternatives _ 1 = ruleAlternatives otherlv _ 2 = ' ) ' )
// InternalSimpleAntlr . g : 1281:2 : this _ OPEN _ 0 = RULE _ OPEN this _ Alternatives _ 1 = ruleAlternatives otherlv _ 2 = ' ) '
// The current EObject is whatever ruleAlternatives produced; on a recognition
// error the parser recovers and returns whatever was built so far.
{ this_OPEN_0 = ( Token ) match ( input , RULE_OPEN , FOLLOW_14 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( this_OPEN_0 , grammarAccess . getParenthesizedAccess ( ) . getOPENTerminalRuleCall_0 ( ) ) ; } if ( state . backtracking == 0 ) { } if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getParenthesizedAccess ( ) . getAlternativesParserRuleCall_1 ( ) ) ; } pushFollow ( FOLLOW_27 ) ; this_Alternatives_1 = ruleAlternatives ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { current = this_Alternatives_1 ; afterParserOrEnumRuleCall ( ) ; } otherlv_2 = ( Token ) match ( input , 34 , FOLLOW_2 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_2 , grammarAccess . getParenthesizedAccess ( ) . getRightParenthesisKeyword_2 ( ) ) ; } } } if ( state . backtracking == 0 ) { leaveRule ( ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
|
public class Detector { /** * Gets the coordinate of the first point with a different color in the given direction */
private Point getFirstDifferent ( Point init , boolean color , int dx , int dy ) { } }
|
int x = init . getX ( ) + dx ; int y = init . getY ( ) + dy ; while ( isValid ( x , y ) && image . get ( x , y ) == color ) { x += dx ; y += dy ; } x -= dx ; y -= dy ; while ( isValid ( x , y ) && image . get ( x , y ) == color ) { x += dx ; } x -= dx ; while ( isValid ( x , y ) && image . get ( x , y ) == color ) { y += dy ; } y -= dy ; return new Point ( x , y ) ;
|
public class BlacklistUrlFilter { /** * Initialize " deny " parameter from web . xml .
* @ param filterConfig A filter configuration object used by a servlet container
* to pass information to a filter during initialization . */
public void init ( final FilterConfig filterConfig ) { } }
|
final String denyParam = filterConfig . getInitParameter ( "denyUrls" ) ; if ( StringUtils . isNotBlank ( denyParam ) ) { this . denyUrls = denyParam . split ( "," ) ; } final String ignoreParam = filterConfig . getInitParameter ( "ignoreUrls" ) ; if ( StringUtils . isNotBlank ( ignoreParam ) ) { this . ignoreUrls = ignoreParam . split ( "," ) ; }
|
public class FileSystemUtilities { /** * If the supplied fileOrDir is a File , it is added to the returned List if any of the filters Match .
* If the supplied fileOrDir is a Directory , it is listed and any of the files immediately within the fileOrDir
* directory are returned within the resulting List provided that they match any of the supplied filters .
* @ param fileOrDir A File or Directory .
* @ param fileFilters A List of filter of which at least one must match to add the File
* ( or child Files , in case of a directory ) to the resulting List .
* @ param log The active Maven Log
* @ return A List holding the supplied File ( or child Files , in case fileOrDir is a Directory ) given that at
* least one Filter accepts them . */
@ SuppressWarnings ( "all" ) public static List < File > listFiles ( final File fileOrDir , final List < Filter < File > > fileFilters , final Log log ) { } }
|
return listFiles ( fileOrDir , fileFilters , false , log ) ;
|
public class AmazonEC2Client { /** * Displays details about an import virtual machine or import snapshot tasks that are already created .
* @ param describeImportImageTasksRequest
* Contains the parameters for DescribeImportImageTasks .
* @ return Result of the DescribeImportImageTasks operation returned by the service .
* @ sample AmazonEC2 . DescribeImportImageTasks
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / DescribeImportImageTasks " target = " _ top " > AWS
* API Documentation < / a > */
@ Override public DescribeImportImageTasksResult describeImportImageTasks ( DescribeImportImageTasksRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeDescribeImportImageTasks ( request ) ;
|
public class HttpClient { /** * Perform a delete against the WSAPI
* @ param url the request url
* @ return the JSON encoded string response
* @ throws IOException if a non - 200 response code is returned or if some other
* problem occurs while executing the request */
public String doDelete ( String url ) throws IOException { } }
|
HttpDelete httpDelete = new HttpDelete ( getWsapiUrl ( ) + url ) ; return doRequest ( httpDelete ) ;
|
public class ContextFromVertx { /** * This will give you the request body nicely parsed . You can register your
* own parsers depending on the request type .
* @ param classOfT The class of the result .
* @ return The parsed request or null if something went wrong . */
@ Override public < T > T body ( Class < T > classOfT , Type genericType ) { } }
|
String rawContentType = request ( ) . contentType ( ) ; // If the Content - type : xxx header is not set we return null .
// we cannot parse that request .
if ( rawContentType == null ) { return null ; } // If Content - type is application / json ; charset = utf - 8 we split away the charset
// application / json
String contentTypeOnly = HttpUtils . getContentTypeFromContentTypeAndCharacterSetting ( rawContentType ) ; BodyParser parser = services . getContentEngines ( ) . getBodyParserEngineForContentType ( contentTypeOnly ) ; if ( parser == null ) { return null ; } return parser . invoke ( this , classOfT , genericType ) ;
|
public class PrettyTimeFormat { /** * Convenience format method for precise durations .
* @ param ref
* The date of reference .
* @ param then
* The future date .
* @ param precision
* The precision to retain in milliseconds .
* @ return a relative format date as text representation or an empty string
* if no durations are retained */
public String format ( Date ref , Date then , long precision ) { } }
|
List < Duration > durations = DurationHelper . calculatePreciseDuration ( ref , then , prettyTime . getUnits ( ) ) ; List < Duration > retained = retainPrecision ( durations , precision ) ; return retained . isEmpty ( ) ? "" : prettyTime . format ( retained ) ;
|
public class CommerceShippingFixedOptionRelPersistenceImpl { /** * Removes all the commerce shipping fixed option rels where commerceShippingFixedOptionId = & # 63 ; from the database .
* @ param commerceShippingFixedOptionId the commerce shipping fixed option ID */
@ Override public void removeByCommerceShippingFixedOptionId ( long commerceShippingFixedOptionId ) { } }
|
for ( CommerceShippingFixedOptionRel commerceShippingFixedOptionRel : findByCommerceShippingFixedOptionId ( commerceShippingFixedOptionId , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ) { remove ( commerceShippingFixedOptionRel ) ; }
|
public class ProcessRecordService { /** * Write all fields of a record to HBase . To be used in initial insert , or to
* overwrite whatever values are there in HBase .
* Consider using { @ link # setProcessState ( ProcessRecord , ProcessState ) } if you
* want to update only the state .
* @ param processRecord non - null ProcessRecord to write to HBase .
* @ throws IOException if the record cannot be written . */
public void writeJobRecord ( ProcessRecord processRecord ) throws IOException { } }
|
byte [ ] key = keyConv . toBytes ( processRecord . getKey ( ) ) ; Put put = new Put ( key ) ; // Constants . JOB _ FILE _ PROCESS _ TABLE _ BYTES
put . addColumn ( Constants . INFO_FAM_BYTES , Constants . MIN_MOD_TIME_MILLIS_COLUMN_BYTES , Bytes . toBytes ( processRecord . getMinModificationTimeMillis ( ) ) ) ; put . addColumn ( Constants . INFO_FAM_BYTES , Constants . PROCESSED_JOB_FILES_COLUMN_BYTES , Bytes . toBytes ( processRecord . getProcessedJobFiles ( ) ) ) ; put . addColumn ( Constants . INFO_FAM_BYTES , Constants . PROCESS_FILE_COLUMN_BYTES , Bytes . toBytes ( processRecord . getProcessFile ( ) ) ) ; put . addColumn ( Constants . INFO_FAM_BYTES , Constants . PROCESSING_STATE_COLUMN_BYTES , Bytes . toBytes ( processRecord . getProcessState ( ) . getCode ( ) ) ) ; put . addColumn ( Constants . INFO_FAM_BYTES , Constants . MIN_JOB_ID_COLUMN_BYTES , Bytes . toBytes ( processRecord . getMinJobId ( ) ) ) ; put . addColumn ( Constants . INFO_FAM_BYTES , Constants . MAX_JOB_ID_COLUMN_BYTES , Bytes . toBytes ( processRecord . getMaxJobId ( ) ) ) ; Table processRecordTable = null ; try { processRecordTable = hbaseConnection . getTable ( TableName . valueOf ( Constants . JOB_FILE_PROCESS_TABLE ) ) ; processRecordTable . put ( put ) ; } finally { if ( processRecordTable != null ) { processRecordTable . close ( ) ; } }
|
public class ChronoEntity { /** * / * [ deutsch ]
* < p > L & auml ; & szlig ; t die angegebene Abfrage diese Entit & auml ; t
* auswerten . < / p >
* < p > Entspricht { @ code function . apply ( this ) } . Hier & uuml ; ber wird der
* Vorgang der Zeitinterpretation externalisiert und erm & ouml ; glicht
* so benutzerdefinierte Abfragen mit beliebigen Ergebnistypen . Anders
* als bei chronologischen Elementen ist hier nur ein Lesezugriff
* m & ouml ; glich . In der Dokumentation der jeweiligen { @ code ChronoFunction }
* ist nachzuschauen , ob diese Methode im Fall undefinierter Ergebnisse
* { @ code null } zur & uuml ; ckgibt oder eine Ausnahme wirft . < / p >
* @ param < R > generic type of result of query
* @ param function time query
* @ return result of query or { @ code null } if undefined
* @ throws ChronoException if the given query is not executable */
public final < R > R get ( ChronoFunction < ? super T , R > function ) { } }
|
return function . apply ( this . getContext ( ) ) ;
|
public class GetConnectionsFilterMarshaller { /** * Marshall the given parameter object . */
public void marshall ( GetConnectionsFilter getConnectionsFilter , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( getConnectionsFilter == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getConnectionsFilter . getMatchCriteria ( ) , MATCHCRITERIA_BINDING ) ; protocolMarshaller . marshall ( getConnectionsFilter . getConnectionType ( ) , CONNECTIONTYPE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class ClassPathSpringRestClientScanner { /** * { @ inheritDoc } */
@ Override protected boolean checkCandidate ( String beanName , BeanDefinition beanDefinition ) throws IllegalStateException { } }
|
if ( super . checkCandidate ( beanName , beanDefinition ) ) { return true ; } else { log . warn ( "Skipping SpringRestClientFactoryBean with name '" + beanName + "' and '" + beanDefinition . getBeanClassName ( ) + "' interfaceClazz" + ". Bean already defined with the same name!" ) ; return false ; }
|
public class ProximityTracker { /** * Removes from the tracker the object that is referentially equal to
* ( < code > o1 = = object < / code > ) the specified object .
* @ return true if an object was located and removed , false if not . */
public boolean removeObject ( Object object ) { } }
|
for ( int i = 0 ; i < _size ; i ++ ) { if ( _records [ i ] . object == object ) { // shift everything down
System . arraycopy ( _records , i + 1 , _records , i , _size - ( i + 1 ) ) ; // clear out the trailing reference
_records [ -- _size ] = null ; return true ; } } return false ;
|
public class CronTab { /** * Checks if this crontab entry looks reasonable ,
 * and if not , return an warning message .
 * The point of this method is to catch syntactically correct
 * but semantically suspicious combinations , like */
public @ CheckForNull String checkSanity ( ) { } }
|
// Scan the five rank fields (ranks 0-3 come from bits[], rank 4 is day-of-week).
// A rank is "sparse" when at least one value in its legal bound range is NOT set.
// If the first sparse rank is not the left-most (minute) rank, the entry fires on
// every minute of some larger period -- almost always a mistake -- so suggest an
// H-based spec built from the remainder of the original spec string.
OUTER : for ( int i = 0 ; i < 5 ; i ++ ) { long bitMask = ( i < 4 ) ? bits [ i ] : ( long ) dayOfWeek ; for ( int j = BaseParser . LOWER_BOUNDS [ i ] ; j <= BaseParser . UPPER_BOUNDS [ i ] ; j ++ ) { if ( ! checkBits ( bitMask , j ) ) { // this rank has a sparse entry .
// if we have a sparse rank , one of them better be the left - most .
if ( i > 0 ) return Messages . CronTab_do_you_really_mean_every_minute_when_you ( spec , "H " + spec . substring ( spec . indexOf ( ' ' ) + 1 ) ) ; // once we find a sparse rank , upper ranks don ' t matter
break OUTER ; } } } int daysOfMonth = 0 ; for ( int i = 1 ; i < 31 ; i ++ ) { if ( checkBits ( bits [ 2 ] , i ) ) { daysOfMonth ++ ; } } if ( daysOfMonth > 5 && daysOfMonth < 28 ) { // a bit arbitrary
// 6..27 selected days-of-month looks like an attempted every-N-days cycle,
// which cron cannot express cleanly across month boundaries -- warn about it.
// Finally, if the spec can be rewritten with H for load spreading, suggest that.
return Messages . CronTab_short_cycles_in_the_day_of_month_field_w ( ) ; } String hashified = hashify ( spec ) ; if ( hashified != null ) { return Messages . CronTab_spread_load_evenly_by_using_rather_than_ ( hashified , spec ) ; } return null ;
|
public class AnimaQuery { /** * Querying a list
* @ param type model type
* @ param sql sql statement
* @ param params params
* @ param < S >
* @ return List < S > */
public < S > List < S > queryList ( Class < S > type , String sql , List < Object > params ) { } }
|
return this . queryList ( type , sql , params . toArray ( ) ) ;
|
public class GeometryConverterService {
    /**
     * Convert a Geomajas geometry to a JTS geometry.
     *
     * @param geometry Geomajas geometry
     * @return JTS geometry
     * @throws JtsConversionException conversion failed (null argument or
     *         unsupported geometry type)
     */
    public static com.vividsolutions.jts.geom.Geometry toJts(Geometry geometry) throws JtsConversionException {
        if (geometry == null) {
            throw new JtsConversionException("Cannot convert null argument");
        }
        int srid = geometry.getSrid();
        int precision = geometry.getPrecision();
        // A precision of -1 marks a floating precision model; otherwise the
        // scale is 10^precision (number of decimal places kept).
        PrecisionModel model;
        if (precision == -1) {
            model = new PrecisionModel(PrecisionModel.FLOATING);
        } else {
            model = new PrecisionModel(Math.pow(10, precision));
        }
        GeometryFactory factory = new GeometryFactory(model, srid);
        com.vividsolutions.jts.geom.Geometry jts;
        String geometryType = geometry.getGeometryType();
        // Dispatch on the Geomajas type string; multi-geometries and polygon
        // rings recurse through toJts() for their child geometries.
        if (GeometryService.isEmpty(geometry)) {
            jts = createEmpty(factory, geometryType);
        } else if (Geometry.POINT.equals(geometryType)) {
            jts = factory.createPoint(convertCoordinates(geometry)[0]);
        } else if (Geometry.LINEAR_RING.equals(geometryType)) {
            jts = factory.createLinearRing(convertCoordinates(geometry));
        } else if (Geometry.LINE_STRING.equals(geometryType)) {
            jts = factory.createLineString(convertCoordinates(geometry));
        } else if (Geometry.POLYGON.equals(geometryType)) {
            // First child is the exterior ring, the rest are interior rings (holes).
            Geometry[] geometries = geometry.getGeometries();
            if (null != geometries && geometries.length > 0) {
                LinearRing exteriorRing = (LinearRing) toJts(geometries[0]);
                LinearRing[] interiorRings = new LinearRing[geometries.length - 1];
                for (int i = 0; i < interiorRings.length; i++) {
                    interiorRings[i] = (LinearRing) toJts(geometries[i + 1]);
                }
                jts = factory.createPolygon(exteriorRing, interiorRings);
            } else {
                // No rings at all: produce an empty polygon.
                jts = factory.createPolygon(null, null);
            }
        } else if (Geometry.MULTI_POINT.equals(geometryType)) {
            Point[] points = new Point[geometry.getGeometries().length];
            jts = factory.createMultiPoint((Point[]) convertGeometries(geometry, points));
        } else if (Geometry.MULTI_LINE_STRING.equals(geometryType)) {
            LineString[] lineStrings = new LineString[geometry.getGeometries().length];
            jts = factory.createMultiLineString((LineString[]) convertGeometries(geometry, lineStrings));
        } else if (Geometry.MULTI_POLYGON.equals(geometryType)) {
            Polygon[] polygons = new Polygon[geometry.getGeometries().length];
            jts = factory.createMultiPolygon((Polygon[]) convertGeometries(geometry, polygons));
        } else {
            throw new JtsConversionException("Cannot convert geometry: Unsupported type.");
        }
        return jts;
    }
}
|
public class AdHocCompilerCache { /** * Start a timer that prints cache stats to the console every 5s .
* Used for development until we get better stats integration . */
public void startPeriodicStatsPrinting ( ) { } }
|
if ( m_statsTimer == null ) { m_statsTimer = new Timer ( ) ; m_statsTimer . scheduleAtFixedRate ( new TimerTask ( ) { @ Override public void run ( ) { printStats ( ) ; } } , 5000 , 5000 ) ; }
|
public class CachingPersonAttributeDaoImpl {
    /**
     * (non-Javadoc)
     *
     * @see org.jasig.services.persondir.IPersonAttributeDao#getPossibleUserAttributeNames()
     *
     * <p>Pure delegation to the wrapped caching DAO; no caching logic of its
     * own happens here. {@code @JsonIgnore} keeps the derived property out of
     * JSON serialization.</p>
     */
    @Override
    @JsonIgnore
    public Set<String> getPossibleUserAttributeNames(final IPersonAttributeDaoFilter filter) {
        return this.cachedPersonAttributesDao.getPossibleUserAttributeNames(filter);
    }
}
|
public class RecastMesh {
    /**
     * Builds a polygon mesh from the given contour set.
     *
     * <p>Pipeline: (1) size the output arrays from the contours, (2) per
     * contour: triangulate, add/merge vertices, build triangles, greedily
     * merge triangles into polygons of up to {@code nvp} vertices, store the
     * result, (3) remove border vertices, (4) compute adjacency, (5) mark
     * portal edges when a border size is present.</p>
     *
     * @param ctx  build context used for timers and warnings
     * @param cset source contour set
     * @param nvp  maximum number of vertices per output polygon
     * @return the built polygon mesh
     * @see rcAllocPolyMesh, rcContourSet, rcPolyMesh, rcConfig
     */
    public static PolyMesh buildPolyMesh(Context ctx, ContourSet cset, int nvp) {
        ctx.startTimer("BUILD_POLYMESH");
        PolyMesh mesh = new PolyMesh();
        RecastVectors.copy(mesh.bmin, cset.bmin, 0);
        RecastVectors.copy(mesh.bmax, cset.bmax, 0);
        mesh.cs = cset.cs;
        mesh.ch = cset.ch;
        mesh.borderSize = cset.borderSize;
        mesh.maxEdgeError = cset.maxError;

        // Pass 1: compute upper bounds for allocation. Triangulating a contour
        // of n vertices yields at most n-2 triangles.
        int maxVertices = 0;
        int maxTris = 0;
        int maxVertsPerCont = 0;
        for (int i = 0; i < cset.conts.size(); ++i) {
            // Skip null contours.
            if (cset.conts.get(i).nverts < 3)
                continue;
            maxVertices += cset.conts.get(i).nverts;
            maxTris += cset.conts.get(i).nverts - 2;
            maxVertsPerCont = Math.max(maxVertsPerCont, cset.conts.get(i).nverts);
        }
        // 0xfffe: vertex indices must fit in 16 bits with one value reserved.
        if (maxVertices >= 0xfffe) {
            throw new RuntimeException("rcBuildPolyMesh: Too many vertices " + maxVertices);
        }
        // vflags marks border vertices scheduled for removal.
        int[] vflags = new int[maxVertices];
        mesh.verts = new int[maxVertices * 3];
        // polys holds nvp vertex indices + nvp neighbor slots per polygon.
        mesh.polys = new int[maxTris * nvp * 2];
        Arrays.fill(mesh.polys, RC_MESH_NULL_IDX);
        mesh.regs = new int[maxTris];
        mesh.areas = new int[maxTris];
        mesh.nverts = 0;
        mesh.npolys = 0;
        mesh.nvp = nvp;
        mesh.maxpolys = maxTris;
        // Hash-bucket chains for vertex welding (addVertex).
        int[] nextVert = new int[maxVertices];
        int[] firstVert = new int[VERTEX_BUCKET_COUNT];
        for (int i = 0; i < VERTEX_BUCKET_COUNT; ++i)
            firstVert[i] = -1;
        // Scratch buffers reused across contours.
        int[] indices = new int[maxVertsPerCont];
        int[] tris = new int[maxVertsPerCont * 3];
        int[] polys = new int[(maxVertsPerCont + 1) * nvp];
        // The extra trailing slot of 'polys' serves as merge scratch space.
        int tmpPoly = maxVertsPerCont * nvp;

        for (int i = 0; i < cset.conts.size(); ++i) {
            Contour cont = cset.conts.get(i);
            // Skip null contours.
            if (cont.nverts < 3)
                continue;
            // Triangulate contour
            for (int j = 0; j < cont.nverts; ++j)
                indices[j] = j;
            int ntris = triangulate(cont.nverts, cont.verts, indices, tris);
            if (ntris <= 0) {
                // Bad triangulation, should not happen.
                ctx.warn("buildPolyMesh: Bad triangulation Contour " + i + ".");
                ntris = -ntris;
            }
            // Add and merge vertices.
            for (int j = 0; j < cont.nverts; ++j) {
                // Contour vertices are packed as 4 ints: x, y, z, flags.
                int v = j * 4;
                int[] inv = addVertex(cont.verts[v + 0], cont.verts[v + 1], cont.verts[v + 2],
                        mesh.verts, firstVert, nextVert, mesh.nverts);
                indices[j] = inv[0];
                mesh.nverts = inv[1];
                if ((cont.verts[v + 3] & RC_BORDER_VERTEX) != 0) {
                    // This vertex should be removed.
                    vflags[indices[j]] = 1;
                }
            }
            // Build initial polygons (one per non-degenerate triangle).
            int npolys = 0;
            Arrays.fill(polys, RC_MESH_NULL_IDX);
            for (int j = 0; j < ntris; ++j) {
                int t = j * 3;
                if (tris[t + 0] != tris[t + 1] && tris[t + 0] != tris[t + 2] && tris[t + 1] != tris[t + 2]) {
                    polys[npolys * nvp + 0] = indices[tris[t + 0]];
                    polys[npolys * nvp + 1] = indices[tris[t + 1]];
                    polys[npolys * nvp + 2] = indices[tris[t + 2]];
                    npolys++;
                }
            }
            if (npolys == 0)
                continue;
            // Merge polygons greedily: repeatedly merge the pair with the
            // longest shared edge until no legal merge remains.
            if (nvp > 3) {
                for (;;) {
                    // Find best polygons to merge.
                    int bestMergeVal = 0;
                    int bestPa = 0, bestPb = 0, bestEa = 0, bestEb = 0;
                    for (int j = 0; j < npolys - 1; ++j) {
                        int pj = j * nvp;
                        for (int k = j + 1; k < npolys; ++k) {
                            int pk = k * nvp;
                            int[] veaeb = getPolyMergeValue(polys, pj, pk, mesh.verts, nvp);
                            int v = veaeb[0];
                            int ea = veaeb[1];
                            int eb = veaeb[2];
                            if (v > bestMergeVal) {
                                bestMergeVal = v;
                                bestPa = j;
                                bestPb = k;
                                bestEa = ea;
                                bestEb = eb;
                            }
                        }
                    }
                    if (bestMergeVal > 0) {
                        // Found best, merge.
                        int pa = bestPa * nvp;
                        int pb = bestPb * nvp;
                        mergePolyVerts(polys, pa, pb, bestEa, bestEb, tmpPoly, nvp);
                        // Fill the freed slot with the last polygon.
                        int lastPoly = (npolys - 1) * nvp;
                        if (pb != lastPoly) {
                            System.arraycopy(polys, lastPoly, polys, pb, nvp);
                        }
                        npolys--;
                    } else {
                        // Could not merge any polygons, stop.
                        break;
                    }
                }
            }
            // Store polygons.
            for (int j = 0; j < npolys; ++j) {
                int p = mesh.npolys * nvp * 2;
                int q = j * nvp;
                for (int k = 0; k < nvp; ++k)
                    mesh.polys[p + k] = polys[q + k];
                mesh.regs[mesh.npolys] = cont.reg;
                mesh.areas[mesh.npolys] = cont.area;
                mesh.npolys++;
                if (mesh.npolys > maxTris) {
                    throw new RuntimeException(
                            "rcBuildPolyMesh: Too many polygons " + mesh.npolys + " (max:" + maxTris + ").");
                }
            }
        }
        // Remove edge vertices.
        for (int i = 0; i < mesh.nverts; ++i) {
            if (vflags[i] != 0) {
                if (!canRemoveVertex(ctx, mesh, i))
                    continue;
                removeVertex(ctx, mesh, i, maxTris);
                // Remove vertex
                // Note: mesh.nverts is already decremented inside removeVertex()!
                // Fixup vertex flags
                for (int j = i; j < mesh.nverts; ++j)
                    vflags[j] = vflags[j + 1];
                // Re-check the slot that just received the shifted entry.
                --i;
            }
        }
        // Calculate adjacency.
        buildMeshAdjacency(mesh.polys, mesh.npolys, mesh.nverts, nvp);
        // Find portal edges: open edges that lie on the tile border get a
        // direction marker (0x8000 | side) in their neighbor slot.
        if (mesh.borderSize > 0) {
            int w = cset.width;
            int h = cset.height;
            for (int i = 0; i < mesh.npolys; ++i) {
                int p = i * 2 * nvp;
                for (int j = 0; j < nvp; ++j) {
                    if (mesh.polys[p + j] == RC_MESH_NULL_IDX)
                        break;
                    // Skip connected edges.
                    if (mesh.polys[p + nvp + j] != RC_MESH_NULL_IDX)
                        continue;
                    int nj = j + 1;
                    if (nj >= nvp || mesh.polys[p + nj] == RC_MESH_NULL_IDX)
                        nj = 0;
                    int va = mesh.polys[p + j] * 3;
                    int vb = mesh.polys[p + nj] * 3;
                    if (mesh.verts[va + 0] == 0 && mesh.verts[vb + 0] == 0)
                        mesh.polys[p + nvp + j] = 0x8000 | 0;
                    else if (mesh.verts[va + 2] == h && mesh.verts[vb + 2] == h)
                        mesh.polys[p + nvp + j] = 0x8000 | 1;
                    else if (mesh.verts[va + 0] == w && mesh.verts[vb + 0] == w)
                        mesh.polys[p + nvp + j] = 0x8000 | 2;
                    else if (mesh.verts[va + 2] == 0 && mesh.verts[vb + 2] == 0)
                        mesh.polys[p + nvp + j] = 0x8000 | 3;
                }
            }
        }
        // Just allocate the mesh flags array. The user is responsible to fill it.
        mesh.flags = new int[mesh.npolys];
        if (mesh.nverts > 0xffff) {
            throw new RuntimeException("rcBuildPolyMesh: The resulting mesh has too many vertices " + mesh.nverts
                    + " (max " + 0xffff + "). Data can be corrupted.");
        }
        if (mesh.npolys > 0xffff) {
            throw new RuntimeException("rcBuildPolyMesh: The resulting mesh has too many polygons " + mesh.npolys
                    + " (max " + 0xffff + "). Data can be corrupted.");
        }
        ctx.stopTimer("BUILD_POLYMESH");
        return mesh;
    }
}
|
public class CPOptionValueUtil {
    /**
     * Returns the last cp option value in the ordered set where CPOptionId = &#63;.
     *
     * <p>Static facade that forwards to the persistence implementation.</p>
     *
     * @param CPOptionId the cp option ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the last matching cp option value, or <code>null</code> if a matching cp option value could not be found
     */
    public static CPOptionValue fetchByCPOptionId_Last(long CPOptionId,
            OrderByComparator<CPOptionValue> orderByComparator) {
        return getPersistence().fetchByCPOptionId_Last(CPOptionId, orderByComparator);
    }
}
|
public class CmsGalleryControllerHandler { /** * Will be triggered when the categories tab is selected . < p > */
public void onCategoriesTabSelection ( ) { } }
|
if ( ! m_galleryDialog . getCategoriesTab ( ) . isInitOpen ( ) ) { m_galleryDialog . getCategoriesTab ( ) . onContentChange ( ) ; return ; } m_galleryDialog . getCategoriesTab ( ) . openFirstLevel ( ) ; m_galleryDialog . getCategoriesTab ( ) . setInitOpen ( false ) ;
|
public class CreateActivityRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( CreateActivityRequest createActivityRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( createActivityRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( createActivityRequest . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( createActivityRequest . getTags ( ) , TAGS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class MetricsProtoUtils {
    /**
     * Converts a metrics {@link Distribution.BucketOptions} into its proto
     * {@code DistributionValue.BucketOptions} representation.
     *
     * <p>Uses the visitor-style {@code match} API: only explicit bucket
     * boundaries are supported; any other variant triggers an assertion
     * error via {@code Functions.throwAssertionError()}.</p>
     */
    private static DistributionValue.BucketOptions toBucketOptionsProto(Distribution.BucketOptions bucketOptions) {
        final DistributionValue.BucketOptions.Builder builder = DistributionValue.BucketOptions.newBuilder();
        bucketOptions.match(
                new Function<Distribution.BucketOptions.ExplicitOptions, Void>() {
                    @Override
                    public Void apply(Distribution.BucketOptions.ExplicitOptions arg) {
                        // Copy the explicit bucket boundaries into the proto builder.
                        builder.setExplicit(DistributionValue.BucketOptions.Explicit.newBuilder()
                                .addAllBounds(arg.getBucketBoundaries()).build());
                        return null;
                    }
                },
                Functions.<Void>throwAssertionError());
        return builder.build();
    }
}
|
public class BizwifiAPI { /** * 设置微信首页欢迎语
* 设置微信首页欢迎语 , 可选择 “ 欢迎光临XXX ” 或 “ 已连接XXXWiFi ” , XXX为公众号名称或门店名称 。
* @ param accessToken accessToken
* @ param barSet barSet
* @ return BaseResult */
public static BaseResult barSet ( String accessToken , BarSet barSet ) { } }
|
return barSet ( accessToken , JsonUtil . toJSONString ( barSet ) ) ;
|
public class XMLParser { /** * Skip .
* @ throws IOException Signals that an I / O exception has occurred .
* @ throws KriptonRuntimeException the kripton runtime exception */
private void skip ( ) throws IOException , KriptonRuntimeException { } }
|
while ( position < limit || fillBuffer ( 1 ) ) { int c = buffer [ position ] ; if ( c > ' ' ) { break ; } position ++ ; }
|
public class RabbitmqClusterContext { /** * キュー一覧を検証して設定する 。
* @ param queueList the queueList to set
* @ throws RabbitmqCommunicateException キュー一覧が指定されていない場合 */
public void setQueueList ( List < String > queueList ) throws RabbitmqCommunicateException { } }
|
if ( queueList == null || queueList . size ( ) == 0 ) { String message = "QueueList is not defined." ; throw new RabbitmqCommunicateException ( message ) ; } this . queueList = queueList ;
|
public class PropertyHelper { /** * Checks whether the property of the given name is allowed for the model element .
* @ param defClass The class of the model element
* @ param propertyName The name of the property
* @ return < code > true < / code > if the property is allowed for this type of model elements */
public static boolean isPropertyAllowed ( Class defClass , String propertyName ) { } }
|
HashMap props = ( HashMap ) _properties . get ( defClass ) ; return ( props == null ? true : props . containsKey ( propertyName ) ) ;
|
public class CommerceWarehouseUtil {
    /**
     * Returns the last commerce warehouse in the ordered set where groupId = &#63; and active = &#63;.
     *
     * <p>Static facade that forwards to the persistence implementation.</p>
     *
     * @param groupId the group ID
     * @param active the active
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the last matching commerce warehouse
     * @throws NoSuchWarehouseException if a matching commerce warehouse could not be found
     */
    public static CommerceWarehouse findByG_A_Last(long groupId, boolean active,
            OrderByComparator<CommerceWarehouse> orderByComparator)
        throws com.liferay.commerce.exception.NoSuchWarehouseException {
        return getPersistence().findByG_A_Last(groupId, active, orderByComparator);
    }
}
|
public class ObjectUtils { /** * < p > Appends the toString that would be produced by { @ code Object }
* if a class did not override toString itself . { @ code null }
* will throw a NullPointerException for either of the two parameters . < / p >
* < pre >
* ObjectUtils . identityToString ( appendable , " " ) = appendable . append ( " java . lang . String @ 1e23"
* ObjectUtils . identityToString ( appendable , Boolean . TRUE ) = appendable . append ( " java . lang . Boolean @ 7fa "
* ObjectUtils . identityToString ( appendable , Boolean . TRUE ) = appendable . append ( " java . lang . Boolean @ 7fa " )
* < / pre >
* @ param appendable the appendable to append to
* @ param object the object to create a toString for
* @ throws IOException if an I / O error occurs
* @ since 3.2 */
public static void identityToString ( final Appendable appendable , final Object object ) throws IOException { } }
|
Validate . notNull ( object , "Cannot get the toString of a null identity" ) ; appendable . append ( object . getClass ( ) . getName ( ) ) . append ( '@' ) . append ( Integer . toHexString ( System . identityHashCode ( object ) ) ) ;
|
public class ServerStats {
    /**
     * Gets the time when the various percentile data was last updated.
     *
     * <p>Simple delegate to the underlying data distribution's timestamp;
     * exposed as an informational monitor attribute.</p>
     */
    @Monitor(name = "ResponseTimePercentileWhen", type = DataSourceType.INFORMATIONAL,
            description = "The time the percentile values were computed")
    public String getResponseTimePercentileTime() {
        return dataDist.getTimestamp();
    }
}
|
public class ElementGroup { /** * Add a new element to the group .
* @ param element the element to add to the group */
public void add ( IRenderingElement element ) { } }
|
if ( element != null ) { if ( element . getClass ( ) . equals ( ElementGroup . class ) ) elements . addAll ( ( ( ElementGroup ) element ) . elements ) ; else elements . add ( element ) ; }
|
public class SourceFile {
    /**
     * Helper function for sanity checking marker existence: fails when the
     * fragment marker was found but its corresponding main marker was not.
     *
     * @param source the file being checked (used in the error message)
     * @param mname  name of the main marker
     * @param mline  line of the main marker, or -1 if absent
     * @param fname  name of the fragment marker
     * @param fline  line of the fragment marker, or -1 if absent
     * @throws IOException if the fragment marker exists without the main marker
     */
    protected void check(File source, String mname, int mline, String fname, int fline) throws IOException {
        final boolean mainMissing = mline == -1;
        final boolean fragmentFound = fline != -1;
        if (mainMissing && fragmentFound) {
            throw new IOException("Found " + fname + " marker (at line " + (fline + 1)
                    + ") but no " + mname + " marker in '" + source + "'.");
        }
    }
}
|
public class WSJdbcStatement {
    /**
     * <p>Close the first result set stored in childWrapper.</p>
     *
     * @param closeWrapperOnly boolean flag to indicate that only wrapper-closure activities
     *        should be performed, but close of the underlying object is unnecessary.
     *
     * <p>When this method is called, the childWrapper is guaranteed to be not null.</p>
     */
    final protected void closeAndRemoveResultSet(boolean closeWrapperOnly) {
        // close and remove the first result set
        // - no childWrapper != null check here, since the precondition of this
        //   method is that childWrapper is not null
        try {
            ((WSJdbcObject) childWrapper).close(closeWrapperOnly);
        } catch (SQLException ex) {
            // Just trace the error since we need to continue:
            // record it for FFDC diagnostics and emit a warning, but never
            // let a close failure propagate to the caller.
            FFDCFilter.processException(ex,
                    "com.ibm.ws.rsadapter.jdbc.WSJdbcStatement.closeAndRemoveResultSet", "275", this);
            Tr.warning(tc, "ERR_CLOSING_OBJECT", childWrapper, ex);
        }
        // Drop the reference whether or not the close succeeded.
        childWrapper = null;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.