signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class CapacityUtil { /** * Returns the next possible capacity , counting from the current buffers ' size . */ public static int nextCapacity ( int current ) { } }
assert current > 0 && Long . bitCount ( current ) == 1 : "Capacity must be a power of two." ; if ( current < MIN_CAPACITY / 2 ) { current = MIN_CAPACITY / 2 ; } current <<= 1 ; if ( current < 0 ) { throw new RuntimeException ( "Maximum capacity exceeded." ) ; } return current ;
public class DitaValReader { /** * Insert subject scheme based action into filetermap if key not present in the map * @ param subTree subject scheme definition element * @ param attName attribute name * @ param action action to insert */ private void insertAction ( final Element subTree , final QName attName , final Action action ) { } }
if ( subTree == null || action == null ) { return ; } final LinkedList < Element > queue = new LinkedList < > ( ) ; // Skip the sub - tree root because it has been added already . NodeList children = subTree . getChildNodes ( ) ; for ( int i = 0 ; i < children . getLength ( ) ; i ++ ) { if ( children . item ( i ) . getNodeType ( ) == Node . ELEMENT_NODE ) { queue . offer ( ( Element ) children . item ( i ) ) ; } } while ( ! queue . isEmpty ( ) ) { final Element node = queue . poll ( ) ; children = node . getChildNodes ( ) ; for ( int i = 0 ; i < children . getLength ( ) ; i ++ ) { if ( children . item ( i ) . getNodeType ( ) == Node . ELEMENT_NODE ) { queue . offer ( ( Element ) children . item ( i ) ) ; } } if ( SUBJECTSCHEME_SUBJECTDEF . matches ( node ) ) { final String key = node . getAttribute ( ATTRIBUTE_NAME_KEYS ) ; if ( key != null && ! key . trim ( ) . isEmpty ( ) ) { final FilterKey k = new FilterKey ( attName , key ) ; if ( ! filterMap . containsKey ( k ) ) { filterMap . put ( k , action ) ; } } } }
public class GeometryUtils { /** * Sets the geometry type ( in place ) for the given hive geometry bytes * @ param geomref reference to hive geometry bytes * @ param type OGC geometry type */ public static void setType ( BytesWritable geomref , OGCType type ) { } }
geomref . getBytes ( ) [ SIZE_WKID ] = ( byte ) type . getIndex ( ) ;
public class LinkType { /** * Gets whether the link should produce a shared library . * @ return boolean */ public boolean isSharedLibrary ( ) { } }
final String value = this . outputType . getValue ( ) ; // FREEHEP return value . equals ( "shared" ) || value . equals ( "plugin" ) || value . equals ( "jni" ) ;
public class MpScheduler {

    /**
     * Handles an incoming multi-partition initiate task message at the MPI,
     * where such requests are aggregated/deduped. Assigns a new MP txn id,
     * routes every-site system procedures to all partitions, and otherwise
     * queues an MP (or, when possible, an N-partition) procedure task.
     */
    public void handleIv2InitiateTaskMessage(Iv2InitiateTaskMessage message) {
        final String procedureName = message.getStoredProcedureName();
        /*
         * If this is CL replay, use the txnid from the CL and use it to update
         * the current txnid.
         */
        long mpTxnId;
        // Timestamp is actually a pre-IV2ish style time-based transaction id.
        long timestamp = Long.MIN_VALUE;
        // Update UID if it's for replay.
        if (message.isForReplay()) {
            timestamp = message.getUniqueId();
            m_uniqueIdGenerator.updateMostRecentlyGeneratedUniqueId(timestamp);
        } else {
            timestamp = m_uniqueIdGenerator.getNextUniqueId();
        }
        TxnEgo ego = advanceTxnEgo();
        mpTxnId = ego.getTxnId();
        // Thread name has to be materialized here (the trace lambdas below may
        // run on a different thread).
        final String threadName = Thread.currentThread().getName();
        final VoltTrace.TraceEventBatch traceLog = VoltTrace.log(VoltTrace.Category.MPI);
        if (traceLog != null) {
            traceLog.add(() -> VoltTrace.meta("process_name", "name", CoreUtils.getHostnameOrAddress()))
                    .add(() -> VoltTrace.meta("thread_name", "name", threadName))
                    .add(() -> VoltTrace.meta("thread_sort_index", "sort_index", Integer.toString(100)))
                    .add(() -> VoltTrace.beginAsync("initmp", mpTxnId,
                            "txnId", TxnEgo.txnIdToString(mpTxnId),
                            "ciHandle", message.getClientInterfaceHandle(),
                            "name", procedureName,
                            "read", message.isReadOnly()));
        }

        // Don't have an SP HANDLE at the MPI, so fill in the unused value.
        Iv2Trace.logIv2InitiateTaskMessage(message, m_mailbox.getHSId(), mpTxnId, Long.MIN_VALUE);

        // Handle every-site system procedures (at the MPI): fan an SP initiate
        // task out to all remote sites and track responses with a duplicate counter.
        final Config sysprocConfig = SystemProcedureCatalog.listing.get(procedureName);
        if (sysprocConfig != null && sysprocConfig.getEverysite()) {
            // Send an SP initiate task to all remote sites.
            final Long localId = m_mailbox.getHSId();
            Iv2InitiateTaskMessage sp = new Iv2InitiateTaskMessage(
                    localId, // make the MPI the initiator.
                    message.getCoordinatorHSId(),
                    m_repairLogTruncationHandle,
                    mpTxnId,
                    timestamp,
                    message.isReadOnly(),
                    true, // isSinglePartition
                    null,
                    message.getStoredProcedureInvocation(),
                    message.getClientInterfaceHandle(),
                    message.getConnectionId(),
                    message.isForReplay());
            DuplicateCounter counter = new DuplicateCounter(message.getInitiatorHSId(), mpTxnId, m_iv2Masters, message);
            safeAddToDuplicateCounterMap(mpTxnId, counter);
            EveryPartitionTask eptask = new EveryPartitionTask(m_mailbox, m_pendingTasks, sp, m_iv2Masters);
            m_pendingTasks.offer(eptask);
            return;
        }

        // Create a copy so we can overwrite the txnID so the InitiateResponse will be
        // correctly tracked.
        Iv2InitiateTaskMessage mp = new Iv2InitiateTaskMessage(
                message.getInitiatorHSId(),
                message.getCoordinatorHSId(),
                m_repairLogTruncationHandle,
                mpTxnId,
                timestamp,
                message.isReadOnly(),
                message.isSinglePartition(),
                null,
                message.getStoredProcedureInvocation(),
                message.getClientInterfaceHandle(),
                message.getConnectionId(),
                message.isForReplay());

        // Multi-partition initiation (at the MPI).
        MpProcedureTask task = null;
        if (isNpTxn(message) && NP_PROCEDURE_CLASS.hasProClass()) {
            Set<Integer> involvedPartitions = getBalancePartitions(message);
            if (involvedPartitions != null) {
                HashMap<Integer, Long> involvedPartitionMasters = Maps.newHashMap(m_partitionMasters);
                involvedPartitionMasters.keySet().retainAll(involvedPartitions);
                task = instantiateNpProcedureTask(m_mailbox, procedureName, m_pendingTasks, mp,
                        involvedPartitionMasters, m_buddyHSIds.get(m_nextBuddy), false, m_hostId);
            }
            // if cannot figure out the involved partitions, run it as an MP txn
        }
        // Explicit partition list attached to the message also yields an NP task.
        int[] nPartitionIds = message.getNParitionIds();
        if (nPartitionIds != null) {
            HashMap<Integer, Long> involvedPartitionMasters = new HashMap<>();
            for (int partitionId : nPartitionIds) {
                involvedPartitionMasters.put(partitionId, m_partitionMasters.get(partitionId));
            }
            task = instantiateNpProcedureTask(m_mailbox, procedureName, m_pendingTasks, mp,
                    involvedPartitionMasters, m_buddyHSIds.get(m_nextBuddy), false, m_hostId);
        }
        // Fall back to a plain multi-partition task.
        if (task == null) {
            task = new MpProcedureTask(m_mailbox, procedureName, m_pendingTasks, mp, m_iv2Masters,
                    m_partitionMasters, m_buddyHSIds.get(m_nextBuddy), false, m_hostId, false);
        }
        // Round-robin over the buddy sites.
        m_nextBuddy = (m_nextBuddy + 1) % m_buddyHSIds.size();
        m_outstandingTxns.put(task.m_txnState.txnId, task.m_txnState);
        m_pendingTasks.offer(task);
    }
}
public class JdbcPatternUtil { /** * 兼容 { } 内出现下划线的情况 */ public static String bareError ( String pattern ) { } }
List < String > camelKeys = getCamelKeys ( pattern ) , orignalKeys = getOriginalKeys ( pattern ) ; for ( int i = 0 ; i < camelKeys . size ( ) ; i ++ ) { pattern = pattern . replaceFirst ( "\\{" + orignalKeys . get ( i ) + "\\}" , "{" + camelKeys . get ( i ) + "}" ) ; } return pattern ;
public class AmazonAlexaForBusinessClient { /** * Determines the details for the room from which a skill request was invoked . This operation is used by skill * developers . * @ param resolveRoomRequest * @ return Result of the ResolveRoom operation returned by the service . * @ throws NotFoundException * The resource is not found . * @ sample AmazonAlexaForBusiness . ResolveRoom * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / alexaforbusiness - 2017-11-09 / ResolveRoom " target = " _ top " > AWS * API Documentation < / a > */ @ Override public ResolveRoomResult resolveRoom ( ResolveRoomRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeResolveRoom ( request ) ;
public class BaseBroadcastBoolOp { /** * Calculate the output shape for this op * @ return */ public List < LongShapeDescriptor > calculateOutputShape ( ) { } }
if ( x == null || y == null ) return Collections . emptyList ( ) ; long [ ] shapeX = x . shape ( ) ; long [ ] shapeY = y . shape ( ) ; return Collections . singletonList ( LongShapeDescriptor . fromShape ( Shape . broadcastOutputShape ( shapeX , shapeY ) , DataType . BOOL ) ) ;
public class SeaGlassSynthPainterImpl {

    /**
     * Paints the border of a tab of a tabbed pane. This implementation invokes
     * the method of the same name without the orientation.
     *
     * @param context     SynthContext identifying the <code>JComponent</code>
     *                    and <code>Region</code> to paint to
     * @param g           <code>Graphics</code> to paint to
     * @param x           X coordinate of the area to paint to
     * @param y           Y coordinate of the area to paint to
     * @param w           width of the area to paint to
     * @param h           height of the area to paint to
     * @param tabIndex    index of tab being painted
     * @param orientation one of <code>JTabbedPane.TOP</code>,
     *                    <code>JTabbedPane.LEFT</code>,
     *                    <code>JTabbedPane.BOTTOM</code>, or
     *                    <code>JTabbedPane.RIGHT</code>
     */
    public void paintTabbedPaneTabBorder(SynthContext context, Graphics g, int x, int y, int w, int h, int tabIndex, int orientation) {
        // tabIndex and orientation are intentionally unused here: painting is
        // delegated to the generic border painter with no subcontrol key.
        paintBorder(context, g, x, y, w, h, null);
    }
}
public class ListTrainingJobsResult { /** * An array of < code > TrainingJobSummary < / code > objects , each listing a training job . * @ param trainingJobSummaries * An array of < code > TrainingJobSummary < / code > objects , each listing a training job . */ public void setTrainingJobSummaries ( java . util . Collection < TrainingJobSummary > trainingJobSummaries ) { } }
if ( trainingJobSummaries == null ) { this . trainingJobSummaries = null ; return ; } this . trainingJobSummaries = new java . util . ArrayList < TrainingJobSummary > ( trainingJobSummaries ) ;
public class TaskServiceImpl {

    /**
     * Acknowledges that a task has been received by a worker.
     *
     * @param taskId   id of the task
     * @param workerId id of the worker (used here for logging only; not
     *                 forwarded to the delegate)
     * @return "true"/"false" string indicating whether the ack was accepted
     */
    @Service
    public String ackTaskReceived(String taskId, String workerId) {
        LOGGER.debug("Ack received for task: {} from worker: {}", taskId, workerId);
        // Delegates to the single-argument overload and stringifies its result.
        return String.valueOf(ackTaskReceived(taskId));
    }
}
public class Client {

    /**
     * Helper method to roll back the partition state and stop/restart the stream.
     *
     * The stream is stopped (if not already done). Then the rollback seqno state
     * is applied; note that this will also remove all the failover logs for the
     * partition that are higher than the given seqno, since the server told us
     * we are ahead of it. Finally, the stream is restarted again.
     *
     * @param partition the partition id
     * @param seqno     the sequence number to roll back to
     * @return a {@link Completable} that completes once the stream has been
     *         stopped, rolled back, and restarted (in that order)
     */
    public Completable rollbackAndRestartStream(final short partition, final long seqno) {
        // andThen(...) chains guarantee strict ordering: stop -> rollback -> start.
        return stopStreaming(partition)
                .andThen(Completable.create(new Completable.OnSubscribe() {
                    @Override
                    public void call(CompletableSubscriber subscriber) {
                        // Apply the rollback point while the stream is stopped.
                        sessionState().rollbackToPosition(partition, seqno);
                        subscriber.onCompleted();
                    }
                }))
                .andThen(startStreaming(partition));
    }
}
public class InternationalFixedDate { /** * Obtains the current { @ code InternationalFixedDate } from the specified clock . * This will query the specified clock to obtain the current date - today . * Using this method allows the use of an alternate clock for testing . * The alternate clock may be introduced using { @ linkplain Clock dependency injection } . * @ param clock the clock to use , not null * @ return the current date , not null * @ throws DateTimeException if the current date cannot be obtained */ public static InternationalFixedDate now ( Clock clock ) { } }
LocalDate now = LocalDate . now ( clock ) ; return InternationalFixedDate . ofEpochDay ( now . toEpochDay ( ) ) ;
public class MethodInvocation {

    /**
     * Factory method used to construct a new instance of {@link MethodInvocation}
     * initialized with the given {@link Method} and array of {@link Object arguments}
     * passed to the {@link Method} during invocation.
     *
     * The {@link Method} is expected to be a {@link java.lang.reflect.Modifier#STATIC},
     * {@link Class} member {@link Method}.
     *
     * @param method {@link Method} to invoke
     * @param args   array of {@link Object arguments} to pass to the {@link Method}
     *               during invocation
     * @return an instance of {@link MethodInvocation} encapsulating all the necessary
     *         details to invoke the {@link java.lang.reflect.Modifier#STATIC} {@link Method}
     * @see #newMethodInvocation(Object, Method, Object...)
     * @see java.lang.reflect.Method
     */
    public static MethodInvocation newMethodInvocation(Method method, Object... args) {
        // A null target object signals a static method invocation to the delegate.
        return newMethodInvocation(null, method, args);
    }
}
public class DatatypeConverter { /** * Print an extended attribute value . * @ param writer parent MSPDIWriter instance * @ param value attribute value * @ param type type of the value being passed * @ return string representation */ public static final String printExtendedAttribute ( MSPDIWriter writer , Object value , DataType type ) { } }
String result ; if ( type == DataType . DATE ) { result = printExtendedAttributeDate ( ( Date ) value ) ; } else { if ( value instanceof Boolean ) { result = printExtendedAttributeBoolean ( ( Boolean ) value ) ; } else { if ( value instanceof Duration ) { result = printDuration ( writer , ( Duration ) value ) ; } else { if ( type == DataType . CURRENCY ) { result = printExtendedAttributeCurrency ( ( Number ) value ) ; } else { if ( value instanceof Number ) { result = printExtendedAttributeNumber ( ( Number ) value ) ; } else { result = value . toString ( ) ; } } } } } return ( result ) ;
public class ObjectMappableProcessor { /** * Get the package name of a certain clazz * @ param clazz The class you want the packagename for * @ return The package name */ private String getPackageName ( ObjectMappableAnnotatedClass clazz ) { } }
PackageElement pkg = elements . getPackageOf ( clazz . getElement ( ) ) ; return pkg . isUnnamed ( ) ? "" : pkg . getQualifiedName ( ) . toString ( ) ;
public class BucketTreeStack { /** * Trims the capacity of this < tt > BucketArrayList < / tt > instance to be the * list ' s current size . An application can use this operation to minimize * the storage of an < tt > BucketArrayList < / tt > instance . */ public void trimToSize ( ) { } }
modCount ++ ; while ( ( bucketDepth > 0 ) && ( bucketSize << ( ( bucketDepth - 1 ) * bucketExp ) >= size ) ) { bucket = ( Object [ ] ) bucket [ 0 ] ; }
public class RuleBasedOptimizer {

    /**
     * Method that is used to add the indexing rules to the rule stack. This method
     * can be overridden by subclasses when custom indexing rules are to be used.
     * By default, this method simply adds the {@link AddIndexes} rule.
     *
     * @param ruleStack the stack where the rules should be placed; never null
     * @param hints     the plan hints (unused by the default implementation)
     */
    protected void populateIndexingRules(LinkedList<OptimizerRule> ruleStack, PlanHints hints) {
        // Default behavior: append only the implicit-indexes rule.
        ruleStack.addLast(AddIndexes.implicitIndexes());
    }
}
public class UpdateGroupQueryRequestMarshaller {

    /**
     * Marshalls the given parameter object into the protocol marshaller.
     *
     * @param updateGroupQueryRequest the request to marshall; must not be null
     * @param protocolMarshaller      the marshaller receiving the field bindings
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(UpdateGroupQueryRequest updateGroupQueryRequest, ProtocolMarshaller protocolMarshaller) {
        if (updateGroupQueryRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each request field under its pre-defined marshalling binding.
            protocolMarshaller.marshall(updateGroupQueryRequest.getGroupName(), GROUPNAME_BINDING);
            protocolMarshaller.marshall(updateGroupQueryRequest.getResourceQuery(), RESOURCEQUERY_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class SearchPortletController { /** * Display AJAX autocomplete search results for the last query */ @ ResourceMapping ( value = "retrieveSearchJSONResults" ) public ModelAndView showJSONSearchResults ( PortletRequest request ) { } }
PortletPreferences prefs = request . getPreferences ( ) ; int maxTextLength = Integer . parseInt ( prefs . getValue ( AUTOCOMPLETE_MAX_TEXT_LENGTH_PREF_NAME , "180" ) ) ; final Map < String , Object > model = new HashMap < > ( ) ; List < AutocompleteResultsModel > results = new ArrayList < > ( ) ; final PortletSession session = request . getPortletSession ( ) ; String queryId = ( String ) session . getAttribute ( SEARCH_LAST_QUERY_ID ) ; if ( queryId != null ) { final PortalSearchResults portalSearchResults = this . getPortalSearchResults ( request , queryId ) ; if ( portalSearchResults != null ) { final ConcurrentMap < String , List < Tuple < SearchResult , String > > > resultsMap = portalSearchResults . getResults ( ) ; results = collateResultsForAutoCompleteResponse ( resultsMap , maxTextLength ) ; } } model . put ( "results" , results ) ; model . put ( "count" , results . size ( ) ) ; return new ModelAndView ( "json" , model ) ;
public class SQLPPMappingToR2RMLConverter { /** * the method to write the R2RML mappings * from an rdf Model to a file * @ param file the ttl file to write to */ public void write ( File file ) throws Exception { } }
try { FileOutputStream fos = new FileOutputStream ( file ) ; write ( fos ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; throw e ; }
public class EJSHome {

    /**
     * This method creates and returns a new <code>BeanO</code> instance
     * appropriate for this home.<p>
     *
     * The returned <code>BeanO</code> has a newly created enterprise bean
     * instance associated with it, and the enterprise bean instance has had its
     * set...Context() method called on it to set its context to the returned
     * <code>BeanO</code>.<p>
     *
     * This method must only be called when a new <code>BeanO</code> instance is
     * needed. It always creates a new <code>BeanO</code> instance and a new
     * instance of the associated enterprise bean.<p>
     *
     * For CMP beans it is necessary to reset CMP fields to Java defaults at this
     * time. Our solution is to create a new instance each time a create is
     * invoked on the CMP home. This instance is added to the pool during
     * passivate and only if the pool size is less than the max, else it is
     * discarded. This approach avoids messy reflection API usage for
     * resetting.<p>
     *
     * If this method returns successfully, then
     * {@link EJBThreadData#pushCallbackBeanO} will have been called, and the
     * caller must ensure that {@link EJBThreadData#popCallbackBeanO} is
     * eventually called.<p>
     *
     * @param threadData the <code>EJBThreadData</code> associated with the
     *                   currently running thread
     * @param tx         the <code>ContainerTx</code> to associate with the newly
     *                   created <code>BeanO</code>
     * @param activate   true if the created BeanO will be used for bean
     *                   activation, false for bean creation
     * @param context    the context for creating the bean, or null
     * @return newly created <code>BeanO</code> associated with a newly created
     *         bean instance of the type of beans managed by this home
     */
    private BeanO createBeanO(EJBThreadData threadData, ContainerTx tx, boolean activate,
                              ManagedObjectContext context) throws RemoteException {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled(); // d532639.2
        if (isTraceOn && tc.isEntryEnabled()) // d367572.7
            Tr.entry(tc, "createBeanO(ContainerTx, activate) activate = " + activate);

        homeEnabled();
        BeanO result = null;

        // For Stateless Session beans, the number of 'active' beans may
        // be limited. The call to allocateBeanO will 'assign' one to this
        // thread and either return one from the pool, or return null to
        // indicate the pool is empty, but the limit is not currently in use
        // so one should be created. allocateBeanO will block and control will
        // NOT be returned until a BeanO has been allocated... or the attempt
        // has timed out, in which case an Exception will be thrown. PK20648
        if (beanMetaData.ivMaxCreation > 0) {
            result = allocateBeanO(tx);
        } // F743-509.CodRev
        // Max creation limit is zero. Determine if is a Singleton session home.
        // A singleton session home never has a max creation limit since there
        // never is more than 1 Singleton instance created.
        else if (ivSingletonSessionHome) // d565527
        {
            result = createSingletonBeanO(); // F743-1753
        } // F743-509.CodRev
        // Neither a Stateless with a max creation limit nor a Singleton session bean
        // home. If there is a bean pool, get an instance from the pool. Otherwise,
        // fall through and create a new one.
        else if (beanPool != null) {
            result = (BeanO) beanPool.get();
        }

        if (result == null) {
            // Create a new instance.
            if (statefulSessionHome && activate) // d367572.7
            {
                try {
                    result = beanOFactory.create(container, this, true); // d367572.7
                } catch (InvocationTargetException e) {
                    // we don't create the instance when reactivating, so ITE should not be thrown
                    FFDCFilter.processException(e, CLASS_NAME + ".createBeanO", "960", this);
                    throw new IllegalStateException(e);
                }
            } else {
                long createStartTime = -1;
                try {
                    // For Stateless and MessageDriven, create count is the same
                    // as instantiation count. For these types, create time should
                    // include creating the instance and calling any lifecycle
                    // callbacks. d626533.1
                    if (pmiBean != null && (statelessSessionHome || messageDrivenHome)) {
                        createStartTime = pmiBean.initialTime(EJBPMICollaborator.CREATE_RT);
                    }
                    result = beanOFactory.create(container, this, false);
                } catch (InvocationTargetException e) {
                    FFDCFilter.processException(e, CLASS_NAME + ".createBeanO", "977", this);
                    throw new RemoteException(enterpriseBeanClass.getName(), e.getCause());
                } finally {
                    // Even if the create fails, go ahead and add the time, so
                    // the number of times counted matches the create count.
                    if (createStartTime > -1) {
                        pmiBean.finalTime(EJBPMICollaborator.CREATE_RT, createStartTime);
                    }
                }
            }
        } else if (!activate
                   && beanMetaData.type == InternalConstants.TYPE_CONTAINER_MANAGED_ENTITY
                   && beanMetaData.cmpResetFields != null) {
            // Bean was obtained from pool and needs CMP instance values reset
            // to Java defaults.
            EnterpriseBean b = result.getEnterpriseBean();
            for (int i = 0; i < beanMetaData.cmpResetFields.length; ++i) {
                try {
                    java.lang.reflect.Field f = beanMetaData.cmpResetFields[i];
                    Class<?> clzz = f.getType();
                    // Primitives get their numeric/boolean/char zero value;
                    // reference fields are nulled out.
                    if (clzz.isPrimitive()) {
                        if (clzz == Long.TYPE) {
                            f.setLong(b, 0);
                        } else if (clzz == Integer.TYPE) {
                            f.setInt(b, 0);
                        } else if (clzz == Boolean.TYPE) {
                            f.setBoolean(b, false);
                        } else if (clzz == Short.TYPE) {
                            f.setShort(b, (short) 0);
                        } else if (clzz == Byte.TYPE) {
                            f.setByte(b, (byte) 0);
                        } else if (clzz == Character.TYPE) {
                            f.setChar(b, (char) 0);
                        } else if (clzz == Double.TYPE) {
                            f.setDouble(b, 0);
                        } else if (clzz == Float.TYPE) {
                            f.setFloat(b, 0);
                        }
                    } else {
                        f.set(b, null);
                    }
                } catch (IllegalAccessException iae) {
                    FFDCFilter.processException(iae, CLASS_NAME + ".createBeanO", "598", this);
                    throw new ContainerException("Problem occurred resetting CMP fields to Java default values", iae);
                }
            }
        }

        // Set the created/found BeanO as the 'Callback' BeanO, as this is the
        // BeanO that is becoming the active beanO for the thread.
        // This will allow methods called by customer code (like Timer methods)
        // to determine the state of the BeanO that is making the call. d168509
        threadData.pushCallbackBeanO(result); // d630940

        if (isTraceOn && tc.isEntryEnabled()) // d367572.7 d402055
            Tr.exit(tc, "createBeanO(ContainerTx, activate) activate = " + activate);

        return result;
    }
}
public class JQMListItem { /** * Sets the image to be used to the given source url . * < br > The same as setImage ( ) , but image is marked as thumbnail class . */ public void setThumbnail ( String src ) { } }
setImage ( src ) ; if ( imageElem != null ) { imageElem . removeClassName ( "jqm4gwt-listitem-icon" ) ; imageElem . addClassName ( "jqm4gwt-listitem-thumb" ) ; }
public class CmsPropertyChange { /** * Sets the given property with the given value to the given resource * ( potentially recursiv ) if it has not been set before . < p > * Returns a list with all sub resources that have been modified this way . < p > * @ param resourceRootPath the resource on which property definition values are changed * @ param propertyDefinition the name of the propertydefinition to change the value * @ param newValue the new value of the propertydefinition * @ param recursive if true , change recursively all property values on sub - resources ( only for folders ) * @ return a list with the < code > { @ link CmsResource } < / code > ' s where the property value has been changed * @ throws CmsVfsException for now only when the search for the oldvalue failed . * @ throws CmsException if operation was not successful */ private List setPropertyInFolder ( String resourceRootPath , String propertyDefinition , String newValue , boolean recursive ) throws CmsException , CmsVfsException { } }
CmsObject cms = getCms ( ) ; // collect the resources to look up List resources = new ArrayList ( ) ; if ( recursive ) { resources = cms . readResources ( resourceRootPath , CmsResourceFilter . IGNORE_EXPIRATION ) ; } else { resources . add ( resourceRootPath ) ; } List changedResources = new ArrayList ( resources . size ( ) ) ; CmsProperty newProperty = new CmsProperty ( propertyDefinition , null , null ) ; // create permission set and filter to check each resource for ( int i = 0 ; i < resources . size ( ) ; i ++ ) { // loop through found resources and check property values CmsResource res = ( CmsResource ) resources . get ( i ) ; CmsProperty property = cms . readPropertyObject ( res , propertyDefinition , false ) ; if ( property . isNullProperty ( ) ) { // change structure value newProperty . setStructureValue ( newValue ) ; newProperty . setName ( propertyDefinition ) ; cms . writePropertyObject ( cms . getRequestContext ( ) . removeSiteRoot ( res . getRootPath ( ) ) , newProperty ) ; changedResources . add ( res ) ; } else { // nop } } return changedResources ;
public class UnboundTypeReference {

    /**
     * Create a new, managed unbound type reference for the given type parameter
     * which was first encountered for the given expression.
     *
     * @param expression    the expression that used/referenced the type parameter
     * @param typeParameter the type parameter
     * @param expectation   the decision path that uses the type parameter
     * @return the unbound type reference created and managed by the expectation
     */
    public static UnboundTypeReference create(ITypeExpectation expectation, XExpression expression, JvmTypeParameter typeParameter) {
        // The expectation owns the reference's lifecycle, so creation is delegated to it.
        return expectation.createUnboundTypeReference(expression, typeParameter);
    }
}
public class Transliterator {

    /**
     * Finishes any pending transliterations that were waiting for more
     * characters. Clients should call this method as the last call after a
     * sequence of one or more calls to <code>transliterate()</code>.
     *
     * @param text  the buffer holding transliterated and untransliterated text
     * @param index the array of indices previously passed to {@link #transliterate}
     */
    public final void finishTransliteration(Replaceable text, Position index) {
        // Reject positions that no longer fit the (possibly modified) text.
        index.validate(text.length());
        // Flags presumably mean (incremental=false, rollback=true) so pending
        // text is flushed in one pass — TODO confirm against filteredTransliterate.
        filteredTransliterate(text, index, false, true);
    }
}
public class AmazonEnvironmentAwareClientBuilder { /** * Gets setting . * @ param key the key * @ param defaultValue the default value * @ return the setting */ public String getSetting ( final String key , final String defaultValue ) { } }
val result = environment . getProperty ( this . propertyPrefix + '.' + key ) ; return StringUtils . defaultIfBlank ( result , defaultValue ) ;
public class GobblinYarnLogSource { /** * Multiple directories may be specified in the LOG _ DIRS string . Split them up and return a list of { @ link Path } s . * @ return list of { @ link Path } s to the log directories * @ throws IOException */ private List < Path > getLocalLogDirs ( ) throws IOException { } }
String logDirs = System . getenv ( ApplicationConstants . Environment . LOG_DIRS . toString ( ) ) ; return COMMA_SPLITTER . splitToList ( logDirs ) . stream ( ) . map ( e -> new Path ( e ) ) . collect ( Collectors . toList ( ) ) ;
public class CsvReader { /** * Configures the reader to read the CSV data and parse it to the given type . The type must be a subclass of * { @ link Tuple } . The type information for the fields is obtained from the type class . The type * consequently needs to specify all generic field types of the tuple . * @ param targetType The class of the target type , needs to be a subclass of Tuple . * @ return The DataSet representing the parsed CSV data . */ public < T extends Tuple > DataSource < T > tupleType ( Class < T > targetType ) { } }
Preconditions . checkNotNull ( targetType , "The target type class must not be null." ) ; if ( ! Tuple . class . isAssignableFrom ( targetType ) ) { throw new IllegalArgumentException ( "The target type must be a subclass of " + Tuple . class . getName ( ) ) ; } @ SuppressWarnings ( "unchecked" ) TupleTypeInfo < T > typeInfo = ( TupleTypeInfo < T > ) TypeExtractor . createTypeInfo ( targetType ) ; CsvInputFormat < T > inputFormat = new TupleCsvInputFormat < T > ( path , this . lineDelimiter , this . fieldDelimiter , typeInfo , this . includedMask ) ; Class < ? > [ ] classes = new Class < ? > [ typeInfo . getArity ( ) ] ; for ( int i = 0 ; i < typeInfo . getArity ( ) ; i ++ ) { classes [ i ] = typeInfo . getTypeAt ( i ) . getTypeClass ( ) ; } configureInputFormat ( inputFormat ) ; return new DataSource < T > ( executionContext , inputFormat , typeInfo , Utils . getCallLocationName ( ) ) ;
public class AstyanaxBlockedDataReaderDAO {
    /**
     * Decodes rows returned by scanning across tables, yielding one {@link MultiTableScanResult}
     * per row. Table lookups are cached per contiguous run of rows with the same table UUID, and
     * undesired tables (dropped/mirror, depending on the flags) are skipped — first by draining a
     * small number of rows, then by restarting the range query past the table if it proves large.
     *
     * @param tables               source of table definitions, looked up by UUID
     * @param placement            delta placement used for the underlying row scans
     * @param rowRange             the overall key range to scan
     * @param limit                caps the number of results returned to the caller
     * @param includeDroppedTables whether rows from dropped/unknown tables are returned
     * @param includeMirrorTables  whether rows from non-primary (mirror) storage are returned
     * @param largeRowThreshold    column count above which the rest of a row is fetched lazily
     * @param consistency          read consistency for the scans
     * @param cutoffTime           if non-null, deltas after this instant are excluded from records
     */
    private Iterator<MultiTableScanResult> scanMultiTableRows(
            final TableSet tables, final DeltaPlacement placement, final ByteBufferRange rowRange,
            final LimitCounter limit, final boolean includeDroppedTables, final boolean includeMirrorTables,
            final int largeRowThreshold, final ReadConsistency consistency, @Nullable final Instant cutoffTime) {
        // Avoiding pinning multiple decoded rows into memory at once.
        return limit.limit(new AbstractIterator<MultiTableScanResult>() {
            // Peeking iterator so skipToNextTable can inspect the next row without consuming it.
            private PeekingIterator<Row<ByteBuffer, DeltaKey>> _iter =
                    Iterators.peekingIterator(rowScan(placement, rowRange, _maxColumnsRange, LimitCounter.max(), consistency));

            // Cache of the most recently resolved table, keyed by _lastTableUuid; avoids a
            // table lookup per row when rows for the same table arrive contiguously.
            private long _lastTableUuid = -1;
            private AstyanaxTable _table = null;
            private boolean _droppedTable;
            private boolean _primaryTable;

            @Override
            protected MultiTableScanResult computeNext() {
                while (_iter.hasNext()) {
                    Row<ByteBuffer, DeltaKey> row = _iter.next();
                    ColumnList<DeltaKey> rowColumns = row.getColumns();

                    // Convert the results into a Record object, lazily fetching the rest of the columns as necessary.
                    ByteBuffer rowKey = row.getRawKey();
                    long tableUuid = AstyanaxStorage.getTableUuid(rowKey);
                    if (_lastTableUuid != tableUuid) {
                        _lastTableUuid = tableUuid;
                        try {
                            _table = (AstyanaxTable) tables.getByUuid(tableUuid);
                        } catch (UnknownTableException e) {
                            // Table no longer exists; synthesize a placeholder so the row can still be reported.
                            _table = AstyanaxTable.createUnknown(tableUuid, placement, e.getTable());
                        } catch (DroppedTableException e) {
                            _table = AstyanaxTable.createUnknown(tableUuid, placement, e.getPriorTable());
                        }
                        _droppedTable = _table.isUnknownTable();
                        _primaryTable = _table.getReadStorage().hasUUID(tableUuid);
                    }

                    // Skip dropped and mirror tables if configured
                    if ((!includeDroppedTables && _droppedTable) || (!includeMirrorTables && !_primaryTable)) {
                        _iter = skipToNextTable(tableUuid);
                        continue;
                    }

                    int shardId = AstyanaxStorage.getShardId(rowKey);
                    String key = AstyanaxStorage.getContentKey(rowKey);
                    Record record = newRecord(new Key(_table, key), rowKey, rowColumns, largeRowThreshold, consistency, cutoffTime);
                    return new MultiTableScanResult(rowKey, shardId, tableUuid, _droppedTable, record);
                }
                return endOfData();
            }

            // Returns an iterator positioned at the first row past the given table's rows.
            private PeekingIterator<Row<ByteBuffer, DeltaKey>> skipToNextTable(long tableUuid) {
                // Iterate over the next 50 rows first to check for a table switch.  This avoids starting a new range
                // query if the number of rows in the undesired table is small.
                int skipLimit = 50;
                Row<ByteBuffer, DeltaKey> row = null;
                while (skipLimit != 0 && _iter.hasNext()) {
                    row = _iter.peek();
                    long nextTableUuid = AstyanaxStorage.getTableUuid(row.getRawKey());
                    if (nextTableUuid != tableUuid) {
                        // This is the first row of a new table
                        return _iter;
                    } else {
                        _iter.next();
                        skipLimit -= 1;
                    }
                }

                if (_iter.hasNext()) {
                    // Skip the table entirely by starting a new query on the next possible table
                    assert row != null;
                    int shardId = AstyanaxStorage.getShardId(row.getRawKey());
                    ByteBuffer nextPossibleTableStart = AstyanaxStorage.getRowKeyRaw(shardId, tableUuid + 1, "");
                    ByteBuffer end = rowRange.getEnd();

                    if (AstyanaxStorage.compareKeys(nextPossibleTableStart, end) < 0) {
                        // We haven't reached the last end boundary of the original range scan
                        ByteBufferRange updatedRange = new ByteBufferRangeImpl(nextPossibleTableStart, end, -1, false);
                        return Iterators.peekingIterator(
                                rowScan(placement, updatedRange, _maxColumnsRange, LimitCounter.max(), consistency));
                    }
                }

                // Scan exhausted or past the end boundary: nothing more to return.
                return Iterators.peekingIterator(Iterators.<Row<ByteBuffer, DeltaKey>>emptyIterator());
            }
        });
    }
}
public class LCharToLongFuncDelta { /** * < editor - fold desc = " object " > */ public static boolean argEquals ( LCharToLongFuncDelta the , Object that ) { } }
return Null . < LCharToLongFuncDelta > equals ( the , that , ( one , two ) -> { if ( one . getClass ( ) != two . getClass ( ) ) { return false ; } LCharToLongFuncDelta other = ( LCharToLongFuncDelta ) two ; return LBiObjLongTriple . argEquals ( one . function , one . deltaFunction , one . lastValue ( ) , other . function , other . deltaFunction , other . lastValue ( ) ) ; } ) ;
public class BuiltInErrorProducer { /** * Called from the error handling config instance , after a configuration update or initial configuration . * @ param errorHandlingConfig */ public void afterConfiguration ( ErrorHandlingConfig errorHandlingConfig ) { } }
if ( ! errorProducerEnabled ) return ; this . errorHandlingConfig = errorHandlingConfig ; // first create default - catchers ErrorCatcherConfig [ ] defaultCatcherConfigs = errorHandlingConfig . getDefaultCatchers ( ) ; if ( defaultCatcherConfigs != null && defaultCatcherConfigs . length > 0 ) { defaultCatchers = new CopyOnWriteArrayList < > ( ) ; for ( ErrorCatcherConfig c : defaultCatcherConfigs ) { defaultCatchers . add ( ErrorCatcherFactory . createErrorCatcher ( c ) ) ; } } // now create regular catchers ErrorCatcherConfig [ ] catcherConfigs = errorHandlingConfig . getCatchers ( ) ; if ( catcherConfigs != null && catcherConfigs . length > 0 ) { catchers = new ConcurrentHashMap < > ( ) ; for ( ErrorCatcherConfig c : catcherConfigs ) { ErrorCatcher catcher = ErrorCatcherFactory . createErrorCatcher ( c ) ; List < ErrorCatcher > catcherList = catchers . get ( c . getExceptionClazz ( ) ) ; if ( catcherList == null ) { catcherList = new LinkedList < > ( ) ; catchers . put ( c . getExceptionClazz ( ) , catcherList ) ; } catcherList . add ( catcher ) ; } }
public class Marc {
    /**
     * Transform W3C document of the record in the ISO 2709 input stream by an XSL stylesheet.
     *
     * @param stylesheetUrl the URL of the stylesheet
     * @param result the result of the transformation
     * @throws IOException if transformation fails
     */
    public void transform(URL stylesheetUrl, Result result) throws IOException {
        // Delegates to the overload taking an explicit TransformerFactory.
        // NOTE(review): a fresh factory is created on every call; if this is hot,
        // consider caching the factory — confirm thread-safety requirements first.
        transform(TransformerFactory.newInstance(), stylesheetUrl, result);
    }
}
public class StorePackageImpl {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getBimServerInfo() {
        // Lazily resolve the EClass from the globally registered Store package;
        // index 88 is the generated classifier position for BimServerInfo.
        if (bimServerInfoEClass == null) {
            bimServerInfoEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(StorePackage.eNS_URI).getEClassifiers().get(88);
        }
        return bimServerInfoEClass;
    }
}
public class DeviceFinder { /** * Remove any device announcements that are so old that the device seems to have gone away . */ private void expireDevices ( ) { } }
long now = System . currentTimeMillis ( ) ; // Make a copy so we don ' t have to worry about concurrent modification . Map < InetAddress , DeviceAnnouncement > copy = new HashMap < InetAddress , DeviceAnnouncement > ( devices ) ; for ( Map . Entry < InetAddress , DeviceAnnouncement > entry : copy . entrySet ( ) ) { if ( now - entry . getValue ( ) . getTimestamp ( ) > MAXIMUM_AGE ) { devices . remove ( entry . getKey ( ) ) ; deliverLostAnnouncement ( entry . getValue ( ) ) ; } } if ( devices . isEmpty ( ) ) { firstDeviceTime . set ( 0 ) ; // We have lost contact with the Pro DJ Link network , so start over with next device . }
public class WrappingUtils {
    /**
     * Updates the overlay-color rounding of the parent's child drawable.
     * <ul>
     * <li>If rounding mode is OVERLAY_COLOR and the child is not a RoundedCornersDrawable,
     * a new RoundedCornersDrawable is created and the child gets wrapped with it.
     * <li>If rounding mode is OVERLAY_COLOR and the child is already wrapped with a
     * RoundedCornersDrawable, its rounding parameters are updated.
     * <li>If rounding mode is not OVERLAY_COLOR and the child is wrapped with a
     * RoundedCornersDrawable, the rounded drawable gets removed and its child gets
     * attached directly to the parent.
     * </ul>
     */
    static void updateOverlayColorRounding(DrawableParent parent, @Nullable RoundingParams roundingParams) {
        Drawable child = parent.getDrawable();
        if (roundingParams != null && roundingParams.getRoundingMethod() == RoundingParams.RoundingMethod.OVERLAY_COLOR) {
            // Overlay rounding requested - either update the overlay params or add a new
            // drawable that will do the requested rounding.
            if (child instanceof RoundedCornersDrawable) {
                RoundedCornersDrawable roundedCornersDrawable = (RoundedCornersDrawable) child;
                applyRoundingParams(roundedCornersDrawable, roundingParams);
                roundedCornersDrawable.setOverlayColor(roundingParams.getOverlayColor());
            } else {
                // Important: remove the child before wrapping it with a new parent!
                // (Detaching first avoids the child briefly having two parents/callbacks.)
                child = parent.setDrawable(sEmptyDrawable);
                child = maybeWrapWithRoundedOverlayColor(child, roundingParams);
                parent.setDrawable(child);
            }
        } else if (child instanceof RoundedCornersDrawable) {
            // Overlay rounding no longer required so remove drawable that was doing the rounding.
            RoundedCornersDrawable roundedCornersDrawable = (RoundedCornersDrawable) child;
            // Important: remove the child before wrapping it with a new parent!
            child = roundedCornersDrawable.setCurrent(sEmptyDrawable);
            parent.setDrawable(child);
            // roundedCornersDrawable is removed and will get garbage collected, clear the child callback
            sEmptyDrawable.setCallback(null);
        }
    }
}
public class AfplibPackageImpl {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getEDI() {
        // Lazily resolve the EClass from the globally registered Afplib package;
        // index 239 is the generated classifier position for EDI.
        if (ediEClass == null) {
            ediEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(239);
        }
        return ediEClass;
    }
}
public class IOUtil { /** * Reads the entries of a Header and returns a map containing the * { @ link HeaderKey } as key and { @ link StandardField } as value . * The map is initialized with all possible HeaderKeys of the subtype and * empty fields . * The passed instances must not be null and the specName must not be empty . * @ param clazz * the concrete subclass of the HeaderKey * @ param specFormat * the format of the specification file * @ param specification * the specification to be used for reading the fields * @ param headerbytes * the bytes of the header * @ param headerOffset * the file offset to the start of the headerbytes * @ param < T > * the type for the header key that the returned map shall use * @ return header entries */ public static < T extends Enum < T > & HeaderKey > Map < T , StandardField > readHeaderEntries ( Class < T > clazz , SpecificationFormat specFormat , List < String [ ] > specification , byte [ ] headerbytes , long headerOffset ) { } }
assert clazz != null && specFormat != null && headerbytes != null ; /* initializers */ // init a full map with default fields . Fields that can be read are // changed subsequently Map < T , StandardField > data = initFullEnumMap ( clazz ) ; // use the specification format to get the right indices int descriptionIndex = specFormat . description ; int offsetIndex = specFormat . offset ; int lengthIndex = specFormat . length ; int keyIndex = specFormat . key ; // loop through every line in the specification , put read data to the // map for ( String [ ] specs : specification ) { // get the enum type for the key string T key = Enum . valueOf ( clazz , specs [ keyIndex ] . trim ( ) ) ; // read offset , length , and description , offset is relative to // header int offset = Integer . parseInt ( specs [ offsetIndex ] . trim ( ) ) ; int length = Integer . parseInt ( specs [ lengthIndex ] . trim ( ) ) ; String description = specs [ descriptionIndex ] ; // get the absolute file offset for the current field long fieldOffset = headerOffset + offset ; // check if value is entirely contained in the headerbytes long value = 0 ; if ( headerbytes . length >= offset + length ) { value = getBytesLongValue ( headerbytes , offset , length ) ; data . put ( key , new StandardField ( key , description , value , fieldOffset , length ) ) ; } else { // value not entirely contained in array , so use a safe method // to fetch it value = getBytesLongValueSafely ( headerbytes , offset , length ) ; // . . . and print a warning message logger . warn ( "offset + length larger than headerbytes given" ) ; } // add data to map data . put ( key , new StandardField ( key , description , value , fieldOffset , length ) ) ; } assert data != null ; return data ;
public class S3StorageObjectMetadata {
    /**
     * Adds the key value pair of custom user-metadata for the associated object.
     *
     * @param key the key of user metadata
     * @param value the value of user metadata
     */
    @Override
    public void addUserMetadata(String key, String value) {
        // Straight delegation to the wrapped S3 object metadata instance.
        this.s3Metadata.addUserMetadata(key, value);
    }
}
public class ConversationManager {
    /**
     * Loads the conversation metadata from disk, preferring the encrypted metadata file and
     * falling back to the legacy v1 (unencrypted) file, which is deleted after a load attempt.
     * Returns a fresh empty {@link ConversationMetadata} when no metadata file exists.
     *
     * @return the loaded (or newly created) conversation metadata, never null
     * @throws ConversationMetadataLoadException if a metadata file exists but cannot be read
     */
    private ConversationMetadata resolveMetadata() throws ConversationMetadataLoadException {
        // Must be called on the conversation queue thread.
        checkConversationQueue();
        try {
            // attempt to load the encrypted metadata file
            File metaFile = new File(conversationsStorageDir, CONVERSATION_METADATA_FILE);
            if (metaFile.exists()) {
                ApptentiveLog.v(CONVERSATION, "Loading metadata file: %s", metaFile);
                return ObjectSerialization.deserialize(metaFile, ConversationMetadata.class, encryptionKey);
            }
            // attempt to load the legacy (unencrypted) metadata file
            metaFile = new File(conversationsStorageDir, CONVERSATION_METADATA_FILE_LEGACY_V1);
            if (metaFile.exists()) {
                ApptentiveLog.v(CONVERSATION, "Loading legacy v1 metadata file: %s", metaFile);
                try {
                    return ObjectSerialization.deserialize(metaFile, ConversationMetadata.class);
                } finally {
                    // we need to delete the legacy file to avoid the data being loaded next time
                    // (the delete runs even when deserialization throws)
                    boolean fileDeleted = metaFile.delete();
                    ApptentiveLog.v(CONVERSATION, "Legacy metadata file deleted: %b", fileDeleted);
                }
            }
            ApptentiveLog.v(CONVERSATION, "No metadata files");
        } catch (Exception e) {
            ApptentiveLog.e(CONVERSATION, e, "Exception while loading conversation metadata");
            logException(e);
            // if we fail to load the metadata - we would not create a new one - just throw an exception
            throw new ConversationMetadataLoadException("Unable to load metadata", e);
        }
        // No file found at all: start from an empty metadata object.
        return new ConversationMetadata();
    }
}
public class VcfVariantAnnotator {
    /**
     * Updates VariantAnnotation objects in variantAnnotationList.
     *
     * @param variantList List of Variant objects. variantList and variantAnnotationList must
     *        contain variants in the SAME order: variantAnnotation at position i must correspond
     *        to variant i
     */
    public void run(List<Variant> variantList) {
        for (int i = 0; i < variantList.size(); i++) {
            Map<String, AdditionalAttribute> customAnnotation = getCustomAnnotation(variantList.get(i));
            // Update only if there are annotations for this variant. customAnnotation may be empty if the variant
            // exists in the vcf but the info field does not contain any of the required attributes.
            // NOTE(review): this assumes a non-null customAnnotation always contains an entry for
            // fileId; a missing key would throw an NPE here — confirm against getCustomAnnotation.
            if (customAnnotation != null && customAnnotation.get(fileId).getAttribute().size() > 0) {
                VariantAnnotation variantAnnotation = variantList.get(i).getAnnotation();
                if (variantAnnotation != null) {
                    Map<String, AdditionalAttribute> additionalAttributes = variantAnnotation.getAdditionalAttributes();
                    if (additionalAttributes == null) {
                        // No attributes yet: install the custom annotation map directly.
                        variantAnnotation.setAdditionalAttributes(customAnnotation);
                    } else {
                        // Attributes already present: merge ours into the existing map in place.
                        additionalAttributes.putAll(customAnnotation);
                    }
                }
            }
        }
    }
}
public class AWSServiceCatalogClient {
    /**
     * Copies the specified source product to the specified target product or a new product.
     * You can copy a product to the same account or another account. You can copy a product to the
     * same region or another region.
     * This operation is performed asynchronously. To track the progress of the operation, use
     * <a>DescribeCopyProductStatus</a>.
     *
     * @param request the copy-product request
     * @return Result of the CopyProduct operation returned by the service.
     * @throws ResourceNotFoundException The specified resource was not found.
     * @throws InvalidParametersException One or more parameters provided to the operation are not valid.
     * @sample AWSServiceCatalog.CopyProduct
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/servicecatalog-2015-12-10/CopyProduct"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public CopyProductResult copyProduct(CopyProductRequest request) {
        // Standard generated-SDK pattern: run request handlers/mutations first,
        // then delegate to the generated execution method.
        request = beforeClientExecution(request);
        return executeCopyProduct(request);
    }
}
public class AutoSizingTextArea { /** * Returns the size of the shadow element */ @ Override public int getShadowSize ( ) { } }
Element shadowElement = shadow . getElement ( ) ; shadowElement . setScrollTop ( 10000 ) ; return shadowElement . getScrollTop ( ) ;
public class SpecializedOps_DDRM { /** * Creates a reflector from the provided vector and gamma . < br > * < br > * Q = I - & gamma ; u u < sup > T < / sup > < br > * In practice { @ link VectorVectorMult _ DDRM # householder ( double , DMatrixD1 , DMatrixD1 , DMatrixD1 ) } multHouseholder } * should be used for performance reasons since there is no need to calculate Q explicitly . * @ param u A vector . Not modified . * @ param gamma To produce a reflector gamma needs to be equal to 2 / | | u | | . * @ return An orthogonal reflector . */ public static DMatrixRMaj createReflector ( DMatrixRMaj u , double gamma ) { } }
if ( ! MatrixFeatures_DDRM . isVector ( u ) ) throw new IllegalArgumentException ( "u must be a vector" ) ; DMatrixRMaj Q = CommonOps_DDRM . identity ( u . getNumElements ( ) ) ; CommonOps_DDRM . multAddTransB ( - gamma , u , u , Q ) ; return Q ;
public class AlexaInput { /** * Checks if a slot is contained in the intent request and its value is not blank . * @ param slotName name of the slot to look after * @ return True , if the slot exists in the intent request and is not blank . */ public boolean hasSlotNotBlank ( final String slotName ) { } }
return hasSlot ( slotName ) && StringUtils . isNotBlank ( intentRequest . getIntent ( ) . getSlot ( slotName ) . getValue ( ) ) ;
public class CachedResponseImpl { /** * Writes the cached headers to the response * @ param pResponse the response */ public void writeHeadersTo ( final CacheResponse pResponse ) { } }
String [ ] headers = getHeaderNames ( ) ; for ( String header : headers ) { // HACK . . . // Strip away internal headers if ( HTTPCache . HEADER_CACHED_TIME . equals ( header ) ) { continue ; } // TODO : Replace Last - Modified with X - Cached - At ? See CachedEntityImpl String [ ] headerValues = getHeaderValues ( header ) ; for ( int i = 0 ; i < headerValues . length ; i ++ ) { String headerValue = headerValues [ i ] ; if ( i == 0 ) { pResponse . setHeader ( header , headerValue ) ; } else { pResponse . addHeader ( header , headerValue ) ; } } }
public class Broadcast { /** * Broadcast absolute max op . See : { @ link BroadcastAMax } */ public static INDArray amax ( INDArray x , INDArray y , INDArray z , int ... dimensions ) { } }
if ( dimensions == null || dimensions . length == 0 ) { validateShapesNoDimCase ( x , y , z ) ; return Nd4j . getExecutioner ( ) . exec ( new AMax ( x , y , z ) ) ; } return Nd4j . getExecutioner ( ) . exec ( new BroadcastAMax ( x , y , z , dimensions ) ) ;
public class AuthorizationHeaderProvider { /** * Gets the OAuth2 header . * @ throws OAuthException if the OAuth2 token could not be refreshed . */ private String getOAuth2Header ( OAuth2Compatible oAuth2Compatible ) throws OAuthException { } }
if ( adsLibConfiguration . isAutoRefreshOAuth2TokenEnabled ( ) ) { try { oAuth2Helper . refreshCredential ( oAuth2Compatible . getOAuth2Credential ( ) ) ; } catch ( IOException e ) { throw new OAuthException ( "OAuth2 token could not be refreshed." , e ) ; } } return oAuth2AuthorizationHeaderProvider . getOAuth2AuthorizationHeader ( oAuth2Compatible ) ;
public class systemuser { /** * Use this API to update systemuser resources . */ public static base_responses update ( nitro_service client , systemuser resources [ ] ) throws Exception { } }
base_responses result = null ; if ( resources != null && resources . length > 0 ) { systemuser updateresources [ ] = new systemuser [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { updateresources [ i ] = new systemuser ( ) ; updateresources [ i ] . username = resources [ i ] . username ; updateresources [ i ] . password = resources [ i ] . password ; updateresources [ i ] . externalauth = resources [ i ] . externalauth ; updateresources [ i ] . promptstring = resources [ i ] . promptstring ; updateresources [ i ] . timeout = resources [ i ] . timeout ; } result = update_bulk_request ( client , updateresources ) ; } return result ;
public class ModuleDeps { /** * Calls { @ link ModuleDepInfo # resolveWith ( Features ) } on each of the values in the map . * @ param features * the feature set to apply . * @ param coerceUndefinedToFalse * if true , undefined features will be treated as false * @ return the current object */ public ModuleDeps resolveWith ( Features features , boolean coerceUndefinedToFalse ) { } }
for ( ModuleDepInfo info : values ( ) ) { info . resolveWith ( features , coerceUndefinedToFalse ) ; } return this ;
public class AbstractBundleLinkRenderer {
    /**
     * Creates a link to a bundle in the page.
     *
     * @param bundleId the bundle ID
     * @param bundlePrefix the bundle prefix
     * @param randomParam the flag indicating if we should use randomParam
     * @param contextPath the context path
     * @param isSslRequest the flag indicating if it's an SSL request
     * @return the link to a bundle in the page
     */
    protected String createBundleLink(String bundleId, String bundlePrefix, String randomParam, String contextPath, boolean isSslRequest) {
        // When debug mode is on and the resource is generated the path must
        // include a parameter
        String path = bundleId;
        String fullPath = null;
        if (bundler.getConfig().isDebugModeOn()) {
            if (bundler.getConfig().getGeneratorRegistry().isPathGenerated(bundleId)) {
                // Generated resource: build a special generation path.
                path = PathNormalizer.createGenerationPath(bundleId, bundler.getConfig().getGeneratorRegistry(), randomParam);
            } else {
                // Plain resource in debug mode: append the cache-busting parameter if present.
                if (StringUtils.isNotEmpty(randomParam)) {
                    path = bundleId + "?" + randomParam;
                }
            }
            fullPath = PathNormalizer.joinPaths(bundler.getConfig().getServletMapping(), path);
        } else {
            // Production mode: the optional bundle prefix is inserted between the servlet
            // mapping and the bundle path.
            if (StringUtils.isNotEmpty(bundlePrefix)) {
                fullPath = PathNormalizer.joinPaths(bundler.getConfig().getServletMapping(), PathNormalizer.joinPaths(bundlePrefix, path));
            } else {
                fullPath = PathNormalizer.joinPaths(bundler.getConfig().getServletMapping(), path);
            }
        }
        // Resolve the final URL against the request context (context path, SSL scheme).
        fullPath = RendererRequestUtils.getRenderedUrl(fullPath, bundler.getConfig(), contextPath, isSslRequest);
        // allow debugOverride to pass through on the generated urls
        if (ThreadLocalJawrContext.isDebugOverriden()) {
            fullPath = PathNormalizer.addGetParameter(fullPath, "overrideKey", bundler.getConfig().getDebugOverrideKey());
        }
        return renderLink(fullPath);
    }
}
public class Task { /** * Retrieve the finish slack . * @ return finish slack */ public Duration getFinishSlack ( ) { } }
Duration finishSlack = ( Duration ) getCachedValue ( TaskField . FINISH_SLACK ) ; if ( finishSlack == null ) { Duration duration = getDuration ( ) ; if ( duration != null ) { finishSlack = DateHelper . getVariance ( this , getEarlyFinish ( ) , getLateFinish ( ) , duration . getUnits ( ) ) ; set ( TaskField . FINISH_SLACK , finishSlack ) ; } } return ( finishSlack ) ;
public class WSSubject { /** * This convenient method returns the caller principal of the * current executing thread . * It will extract the caller from the received credentials of * the current thread . If the received credentials is null , then * a value of null is returned . In the EJB and Web container , * user should use the standard interface provided by the J2EE * specification to get the caller principal or caller name . This * method call provides a way for code executing outside the * containers to get the caller principal . The principal name * return is not qualified with the security realm name . * @ return The principal name ( without the security realm ) . If the * received credential is null , then the value of null will * be returned as the caller principal . */ public static String getCallerPrincipal ( ) { } }
String caller = null ; SubjectManagerService sms = smServiceRef . getService ( ) ; if ( sms != null ) { Subject subject = sms . getCallerSubject ( ) ; if ( subject != null ) { WSCredential wsCred = getWSCredential ( subject ) ; if ( wsCred != null && ! wsCred . isUnauthenticated ( ) ) { try { caller = wsCred . getSecurityName ( ) ; } catch ( Exception e ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Internal error: " + e ) ; } } } } return caller ;
public class Task { /** * This method allows a resource assignment to be added to the * current task . * @ param resource the resource to assign * @ return ResourceAssignment object */ public ResourceAssignment addResourceAssignment ( Resource resource ) { } }
ResourceAssignment assignment = getExistingResourceAssignment ( resource ) ; if ( assignment == null ) { assignment = new ResourceAssignment ( getParentFile ( ) , this ) ; m_assignments . add ( assignment ) ; getParentFile ( ) . getResourceAssignments ( ) . add ( assignment ) ; assignment . setTaskUniqueID ( getUniqueID ( ) ) ; assignment . setWork ( getDuration ( ) ) ; assignment . setUnits ( ResourceAssignment . DEFAULT_UNITS ) ; if ( resource != null ) { assignment . setResourceUniqueID ( resource . getUniqueID ( ) ) ; resource . addResourceAssignment ( assignment ) ; } } return ( assignment ) ;
public class BaseCommandTask { /** * Prompt the user to enter text . * Prompts twice and compares to ensure it was entered correctly . * @ return Entered String */ private String promptForText ( ConsoleWrapper stdin , PrintStream stdout , String enterText , String reenterText , String readError , String entriesDidNotMatch ) { } }
String read1 = stdin . readMaskedText ( getMessage ( enterText ) + " " ) ; String read2 = stdin . readMaskedText ( getMessage ( reenterText ) + " " ) ; if ( read1 == null && read2 == null ) { throw new IllegalArgumentException ( "Unable to read either entry. Aborting prompt." ) ; } else if ( read1 == null || read2 == null ) { stdout . println ( getMessage ( readError ) ) ; return promptForText ( stdin , stdout , enterText , reenterText , readError , entriesDidNotMatch ) ; } else if ( read1 . equals ( read2 ) ) { return read1 ; } else { stdout . println ( getMessage ( entriesDidNotMatch ) ) ; return promptForText ( stdin , stdout , enterText , reenterText , readError , entriesDidNotMatch ) ; }
public class SibRaDynamicDestinationEndpointActivation {
    /**
     * Connects to the given messaging engine. Registers a destination listener and creates
     * listeners for each of the current destinations. Failures are reported via FFDC and error
     * messages; on connection-level failure the connection is closed again.
     *
     * @param messagingEngine the messaging engine to connect to
     */
    synchronized protected void addMessagingEngine(final JsMessagingEngine messagingEngine) {
        final String methodName = "addMessagingEngine";
        if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) {
            SibTr.entry(this, TRACE, methodName, messagingEngine);
        }
        SibRaMessagingEngineConnection connection = null;
        try {
            /*
             * Get a connection for the messaging engine
             */
            connection = getConnection(messagingEngine);
            final SICoreConnection coreConnection = connection.getConnection();
            // NOTE(review): coreConnection is already declared as SICoreConnection, so this
            // instanceof check can only fail when it is null — confirm whether a plain null
            // check was the original intent.
            if (coreConnection instanceof SICoreConnection) {
                /*
                 * Create destination listener
                 */
                final DestinationListener destinationListener = new SibRaDestinationListener(connection, _messageEndpointFactory);
                /*
                 * Determine destination type
                 */
                final DestinationType destinationType = _endpointConfiguration.getDestinationType();
                /*
                 * Register destination listener
                 */
                final SIDestinationAddress[] destinations = coreConnection.addDestinationListener(null, destinationListener, destinationType, DestinationAvailability.RECEIVE);
                /*
                 * Create a listener for each destination...
                 */
                for (int j = 0; j < destinations.length; j++) {
                    try {
                        connection.createListener(destinations[j], _messageEndpointFactory);
                    } catch (final ResourceException exception) {
                        // A single failed destination is logged (with FFDC) but does not stop
                        // listener creation for the remaining destinations.
                        FFDCFilter.processException(exception, CLASS_NAME + "." + methodName, FFDC_PROBE_1, this);
                        SibTr.error(TRACE, "CREATE_LISTENER_FAILED_CWSIV0803", new Object[] { exception, destinations[j].getDestinationName(), messagingEngine.getName(), messagingEngine.getBusName() });
                    }
                }
            }
        } catch (final SIException exception) {
            FFDCFilter.processException(exception, CLASS_NAME + "." + methodName, FFDC_PROBE_2, this);
            SibTr.error(TRACE, "ADD_DESTINATION_LISTENER_FAILED_CWSIV0804", new Object[] { exception, messagingEngine.getName(), messagingEngine.getBusName() });
            // Tear down the partially initialized connection on failure.
            closeConnection(messagingEngine);
        } catch (final ResourceException exception) {
            FFDCFilter.processException(exception, CLASS_NAME + "." + methodName, FFDC_PROBE_3, this);
            SibTr.error(TRACE, "CREATE_CONNECTION_FAILED_CWSIV0801", new Object[] { exception, messagingEngine.getName(), messagingEngine.getBusName() });
            closeConnection(messagingEngine);
        }
        if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) {
            SibTr.exit(this, TRACE, methodName);
        }
    }
}
public class GroupContactSet {
    /**
     * Tell whether the given group pair is a contact in this GroupContactSet,
     * the comparison is done by matching residue numbers and chain identifiers
     *
     * @param group1 first group of the pair
     * @param group2 second group of the pair
     * @return true if the pair is a contact in this set
     */
    public boolean hasContact(Group group1, Group group2) {
        // Delegates to the ResidueNumber-based overload; presumably ResidueNumber carries
        // the chain identifier so both residue number and chain are compared — confirm
        // against that overload's implementation.
        return hasContact(group1.getResidueNumber(), group2.getResidueNumber());
    }
}
public class TagSupport { /** * check if value is not empty * @ param tagName * @ param actionName * @ param attributeName * @ param attribute * @ throws PageException */ public void required ( final String tagName , final String actionName , final String attributeName , final Object attribute ) throws PageException { } }
if ( attribute == null ) { final Excepton util = CFMLEngineFactory . getInstance ( ) . getExceptionUtil ( ) ; throw util . createApplicationException ( "Attribute [" + attributeName + "] for tag [" + tagName + "] is required if attribute action has the value [" + actionName + "]" ) ; }
public class NullnessAnalysis {
    /**
     * Returns the {@link Nullness} of the leaf of {@code exprPath}.
     *
     * <p>If the leaf required the compiler to generate autoboxing or autounboxing calls, {@code
     * getNullness} returns the {@code Nullness} <i>after</i> the boxing/unboxing. This implies that,
     * in those cases, it will always return {@code NONNULL}.
     */
    public Nullness getNullness(TreePath exprPath, Context context) {
        try {
            // Configure the shared propagation transfer function for this compilation unit...
            nullnessPropagation.setContext(context).setCompilationUnit(exprPath.getCompilationUnit());
            return DataFlow.expressionDataflow(exprPath, context, nullnessPropagation);
        } finally {
            // ...and always clear it afterwards so no stale context/compilation-unit
            // references are retained between calls.
            nullnessPropagation.setContext(null).setCompilationUnit(null);
        }
    }
}
public class PeasyRecyclerView { /** * Present as Vertical List View * Execute { @ link # resetItemDecorations ( ) } * Execute { @ link # resetItemAnimator ( ) } * @ return LinearLayoutManager */ public LinearLayoutManager asVerticalListView ( ) { } }
this . presentation = PeasyPresentation . VerticalList ; resetItemDecorations ( ) ; resetItemAnimator ( ) ; final LinearLayoutManager layoutManager = PeasyRecyclerView . VerticalList . newLayoutManager ( getContext ( ) ) ; getRecyclerView ( ) . setLayoutManager ( layoutManager ) ; getRecyclerView ( ) . addItemDecoration ( new DividerItemDecoration ( getContext ( ) , layoutManager . getOrientation ( ) ) ) ; getRecyclerView ( ) . setItemAnimator ( new DefaultItemAnimator ( ) ) ; return layoutManager ;
public class WsMessageRouterImpl { /** * Add the WsLogHandler ref . 1 or more LogHandlers may be set . */ public void setWsLogHandler ( String id , WsLogHandler ref ) { } }
// Registers a log handler under the given id (both must be non-null). The write lock makes the
// registration and the replay of buffered early-startup messages one atomic step, so a message
// cannot be delivered twice (once from the buffer, once live). The early return when no messages
// were buffered still releases the lock via the finally block.
if ( id != null && ref != null ) { // There can be many Reader locks , but only one writer lock . // This ReaderWriter lock is needed to avoid duplicate messages when the class is passing on EarlyBuffer messages to the new WsLogHandler . RERWLOCK . writeLock ( ) . lock ( ) ; try { wsLogHandlerServices . put ( id , ref ) ; /* * Route prev messages to the new LogHandler . * This is primarily for solving the problem during server init where the WsMessageRouterImpl * is registered * after * we ' ve already issued some early startup messages . We cache * these early messages in the " earlierMessages " queue in BaseTraceService , which then * passes them to WsMessageRouterImpl once it ' s registered . */ if ( earlierMessages == null ) { return ; } for ( RoutedMessage earlierMessage : earlierMessages . toArray ( new RoutedMessage [ earlierMessages . size ( ) ] ) ) { if ( shouldRouteMessageToLogHandler ( earlierMessage , id ) ) { routeTo ( earlierMessage , id ) ; } } } finally { RERWLOCK . writeLock ( ) . unlock ( ) ; } }
public class CertifyingSigner { /** * Get a certifying signer instance from the given signer factory for a given certifier . * @ param forSigning true for signing , and false for verifying . * @ param certifier the certified key pair of the certifier . * @ param factory a signer factory to create the signer . * @ return a certifying signer . */ public static CertifyingSigner getInstance ( boolean forSigning , CertifiedKeyPair certifier , SignerFactory factory ) { } }
// Builds a signer around the certifier's private key and attaches the certifier's certificate so
// produced signatures can be linked back to the certifier.
return new CertifyingSigner ( certifier . getCertificate ( ) , factory . getInstance ( forSigning , certifier . getPrivateKey ( ) ) ) ;
public class MvcDialog { /** * Show dialog . * @ param fragmentManager The fragment manager . Usually it ' s the child fragment manager of the * fragment on which the dialog will show * @ param dialogClass The class type of the dialog extending { @ link MvcDialog } */ public static void show ( FragmentManager fragmentManager , Class < ? extends MvcDialog > dialogClass ) { } }
FragmentTransaction ft = fragmentManager . beginTransaction ( ) ; MvcDialog dialogFragment = ( MvcDialog ) fragmentManager . findFragmentByTag ( dialogClass . getName ( ) ) ; if ( dialogFragment == null ) { try { dialogFragment = new ReflectUtils . newObjectByType < > ( dialogClass ) . newInstance ( ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; } } ft . addToBackStack ( null ) ; dialogFragment . show ( ft , dialogClass . getName ( ) ) ;
public class StatementMarshaller { /** * Marshall the given parameter object . */ public void marshall ( Statement statement , ProtocolMarshaller protocolMarshaller ) { } }
// Null-checks the statement, then marshals its two fields (messages list and response card)
// through the protocol marshaller; any failure is wrapped in an SdkClientException with cause.
if ( statement == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( statement . getMessages ( ) , MESSAGES_BINDING ) ; protocolMarshaller . marshall ( statement . getResponseCard ( ) , RESPONSECARD_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class EDBConverter { /** * Generate the value for a specific property of a model out of an EDBObject . */ private Object getValueForProperty ( PropertyDescriptor propertyDescriptor , EDBObject object ) { } }
// Converts the raw EDB value for one bean property into the type expected by the setter:
//  - Map/List/array parameter types are rebuilt element-by-element from the flattened EDB entries;
//  - nested OpenEngSBModel references are loaded from the EDB at the model's stored timestamp
//    (falling back to "now" when the timestamp is missing or unparsable);
//  - FileWrapper values are reassembled from the Base64 content plus the stored filename entry;
//  - java.io.File properties are deliberately skipped (returns null);
//  - enum constants are resolved from the stored value.
// Consumed keys are removed from the EDBObject so leftover entries can be detected by the caller.
Method setterMethod = propertyDescriptor . getWriteMethod ( ) ; String propertyName = propertyDescriptor . getName ( ) ; Object value = object . getObject ( propertyName ) ; Class < ? > parameterType = setterMethod . getParameterTypes ( ) [ 0 ] ; // TODO : OPENENGSB - 2719 do that in a better way than just an if - else series if ( Map . class . isAssignableFrom ( parameterType ) ) { List < Class < ? > > classes = getGenericMapParameterClasses ( setterMethod ) ; value = getMapValue ( classes . get ( 0 ) , classes . get ( 1 ) , propertyName , object ) ; } else if ( List . class . isAssignableFrom ( parameterType ) ) { Class < ? > clazz = getGenericListParameterClass ( setterMethod ) ; value = getListValue ( clazz , propertyName , object ) ; } else if ( parameterType . isArray ( ) ) { Class < ? > clazz = parameterType . getComponentType ( ) ; value = getArrayValue ( clazz , propertyName , object ) ; } else if ( value == null ) { return null ; } else if ( OpenEngSBModel . class . isAssignableFrom ( parameterType ) ) { Object timestamp = object . getObject ( EDBConstants . MODEL_TIMESTAMP ) ; Long time = System . currentTimeMillis ( ) ; if ( timestamp != null ) { try { time = Long . parseLong ( timestamp . toString ( ) ) ; } catch ( NumberFormatException e ) { LOGGER . warn ( "The model with the oid {} has an invalid timestamp." , object . getOID ( ) ) ; } } EDBObject obj = edbService . getObject ( ( String ) value , time ) ; value = convertEDBObjectToUncheckedModel ( parameterType , obj ) ; object . remove ( propertyName ) ; } else if ( parameterType . equals ( FileWrapper . class ) ) { FileWrapper wrapper = new FileWrapper ( ) ; String filename = object . getString ( propertyName + FILEWRAPPER_FILENAME_SUFFIX ) ; String content = ( String ) value ; wrapper . setFilename ( filename ) ; wrapper . setContent ( Base64 . decodeBase64 ( content ) ) ; value = wrapper ; object . remove ( propertyName + FILEWRAPPER_FILENAME_SUFFIX ) ; } else if ( parameterType . 
equals ( File . class ) ) { return null ; } else if ( object . containsKey ( propertyName ) ) { if ( parameterType . isEnum ( ) ) { value = getEnumValue ( parameterType , value ) ; } } object . remove ( propertyName ) ; return value ;
public class CrawlController { /** * Wait until this crawling session finishes . */ public void waitUntilFinish ( ) { } }
// Blocks (on waitingLock.wait()) until the crawl finishes. When haltOnError is configured, an
// error captured by the monitor thread is rethrown here: RuntimeException/Error as-is, anything
// else wrapped in a RuntimeException.
// NOTE(review): InterruptedException is only logged and the loop re-waits; the interrupt status is
// not restored, so this method effectively cannot be interrupted — confirm that is intended
// (restoring the flag here would make wait() throw immediately and spin).
while ( ! finished ) { synchronized ( waitingLock ) { if ( config . isHaltOnError ( ) ) { Throwable t = getError ( ) ; if ( t != null && config . isHaltOnError ( ) ) { if ( t instanceof RuntimeException ) { throw ( RuntimeException ) t ; } else if ( t instanceof Error ) { throw ( Error ) t ; } else { throw new RuntimeException ( "error on monitor thread" , t ) ; } } } if ( finished ) { return ; } try { waitingLock . wait ( ) ; } catch ( InterruptedException e ) { logger . error ( "Error occurred" , e ) ; } } }
public class Base64 { /** * Similar to { @ link # encodeBytes ( byte [ ] ) } but returns a byte array instead of * instantiating a String . This is more efficient if you ' re working with I / O * streams and have large data sets to encode . * @ param source * The data to convert * @ return The Base64 - encoded data as a byte [ ] ( of ASCII characters ) * @ throws NullPointerException * if source array is null * @ since 2.3.1 */ @ Nonnull @ ReturnsMutableCopy public static byte [ ] encodeBytesToBytes ( @ Nonnull final byte [ ] source ) { } }
byte [ ] encoded ; try { encoded = encodeBytesToBytes ( source , 0 , source . length , NO_OPTIONS ) ; } catch ( final IOException ex ) { throw new IllegalStateException ( "IOExceptions only come from GZipping, which is turned off" , ex ) ; } return encoded ;
public class ConnectController { /** * Returns a RedirectView with the URL to redirect to after a connection is created or deleted . * Defaults to " / connect / { providerId } " relative to DispatcherServlet ' s path . * May be overridden to handle custom redirection needs . * @ param providerId the ID of the provider for which a connection was created or deleted . * @ param request the NativeWebRequest used to access the servlet path when constructing the redirect path . * @ return a RedirectView to the page to be displayed after a connection is created or deleted */ protected RedirectView connectionStatusRedirect ( String providerId , NativeWebRequest request ) { } }
// Builds "{connectionStatusUrlPath}{providerId}" plus the request's path extension (e.g. ".json"),
// optionally prefixed with the servlet path, and returns it as a context-relative redirect.
HttpServletRequest servletRequest = request . getNativeRequest ( HttpServletRequest . class ) ; String path = connectionStatusUrlPath + providerId + getPathExtension ( servletRequest ) ; if ( prependServletPath ( servletRequest ) ) { path = servletRequest . getServletPath ( ) + path ; } return new RedirectView ( path , true ) ;
public class PayCreatResponse { /** * 第三方的支付流水号 */ @ Override public PayCreatResponse retcode ( int retcode ) { } }
// Sets the numeric return code and derives the matching human-readable message via
// PayRetCodes.retInfo; returns this for call chaining.
// NOTE(review): the original javadoc (Chinese: "third-party payment transaction number") does not
// describe this method — it appears copied from another member; confirm and fix upstream.
this . retcode = retcode ; this . retinfo = PayRetCodes . retInfo ( retcode ) ; return this ;
public class JawrRequestHandler { /** * Create the Jawr config from the properties * @ param props * the properties * @ return the Jawr config */ protected JawrConfig createJawrConfig ( Properties props ) { } }
// Creates the JawrConfig from the given properties. During build-time bundle processing, the
// bundle mapping is forced on and the working directory reset to the default, because the
// configured values are not valid in that mode.
jawrConfig = new JawrConfig ( resourceType , props , configPropResolver ) ; // Override properties which are incompatible with the build time bundle // processing if ( ThreadLocalJawrContext . isBundleProcessingAtBuildTime ( ) ) { jawrConfig . setUseBundleMapping ( true ) ; // Use the standard working directory jawrConfig . setJawrWorkingDirectory ( null ) ; } return jawrConfig ;
public class KeyReader { /** * Make the request to the Twilio API to perform the read . * @ param client TwilioRestClient with which to make the request * @ return Key ResourceSet */ @ Override public ResourceSet < Key > read ( final TwilioRestClient client ) { } }
// Fetches the first page from the Twilio API and wraps it, together with this reader and the
// client, in a ResourceSet that pages lazily on iteration.
return new ResourceSet < > ( this , client , firstPage ( client ) ) ;
public class BoundsCalculator { /** * Calculate the bounding rectangle for a reaction set . * @ param reactionSet the reaction set to use * @ return the bounding rectangle of the reaction set */ public static Rectangle2D calculateBounds ( IReactionSet reactionSet ) { } }
Rectangle2D totalBounds = new Rectangle2D . Double ( ) ; for ( IReaction reaction : reactionSet . reactions ( ) ) { Rectangle2D reactionBounds = calculateBounds ( reaction ) ; if ( totalBounds . isEmpty ( ) ) { totalBounds = reactionBounds ; } else { Rectangle2D . union ( totalBounds , reactionBounds , totalBounds ) ; } } return totalBounds ;
public class JDBCValueContentAddressStorageImpl { /** * { @ inheritDoc } */ public void addValue ( String propertyId , int orderNum , String identifier ) throws VCASException { } }
try { Connection con = dataSource . getConnection ( ) ; try { PreparedStatement ps = con . prepareStatement ( sqlAddRecord ) ; ps . setString ( 1 , propertyId ) ; ps . setInt ( 2 , orderNum ) ; ps . setString ( 3 , identifier ) ; ps . executeUpdate ( ) ; ps . close ( ) ; } finally { con . close ( ) ; } } catch ( SQLException e ) { // check is it a primary key vioaltion or smth else // if primary key - it ' s record already exists issue , VCAS error otherwise . if ( isRecordAlreadyExistsException ( e ) ) { throw new RecordAlreadyExistsException ( "Record already exists, propertyId=" + propertyId + " orderNum=" + orderNum + ". Error: " + e , e ) ; } throw new VCASException ( "VCAS ADD database error: " + e , e ) ; }
public class MDAG { /** * 后缀查询 < br > * Retrieves all the Strings in the MDAG that begin with a given String . * @ param suffixStr a String that is the suffix for all the desired Strings * @ return a HashSet containing all the Strings present in the MDAG that end with { @ code suffixStr } */ public HashSet < String > getStringsEndingWith ( String suffixStr ) { } }
// Suffix query: collects every string in the MDAG that ends with suffixStr. The traversal starts
// from the live source node's outgoing transitions when the MDAG has not been simplified,
// otherwise from the simplified source node.
HashSet < String > strHashSet = new HashSet < String > ( ) ; if ( sourceNode != null ) // if the MDAG hasn ' t been simplified getStrings ( strHashSet , SearchCondition . SUFFIX_SEARCH_CONDITION , suffixStr , "" , sourceNode . getOutgoingTransitions ( ) ) ; else getStrings ( strHashSet , SearchCondition . SUFFIX_SEARCH_CONDITION , suffixStr , "" , simplifiedSourceNode ) ; return strHashSet ;
public class GlobalExceptionHandler { /** * 构造错误 * @ param mvc mvc * @ param error 错误 * @ return mvc */ @ SuppressWarnings ( "rawtypes" ) private ModelAndView buildError ( ModelAndView mvc , ResultCode error ) { } }
// Builds an error response model: an empty "data" list, the numeric "status" code, a one-entry
// "statusInfo" map and a flat "msg" entry both carrying the same resolved message text.
// (Original javadoc, translated: "construct error"; params: the ModelAndView and the error code.)
mvc . addObject ( "data" , new ArrayList ( 0 ) ) ; mvc . addObject ( "status" , error . getCode ( ) ) ; Map < String , Object > statusInfo = Maps . newHashMapWithExpectedSize ( 1 ) ; statusInfo . put ( WebResponseConstant . MESSAGE_GLOBAL , error . getMessage ( ) . getMessage ( ) ) ; mvc . addObject ( "statusInfo" , statusInfo ) ; mvc . addObject ( "msg" , error . getMessage ( ) . getMessage ( ) ) ; return mvc ;
public class HasFilteringCompilerPass { /** * This method walks the AST looking for has calls . If the condition being tested * by the has call is specified in the features , then replace the node for the call in * the AST with a new node for a literal true / false . We rely on the optimizer to * detect and remove any resulting dead branches . */ private void processChildren ( Node n ) { } }
// Depth-first walk over n's children. Every has() CALL node whose condition is known in
// "features" is replaced by a TRUE/FALSE literal (dead branches are left for the optimizer);
// unknown conditions become FALSE only when coerceUndefinedToFalse is set. Discovered condition
// names are recorded when a collector is configured. The cursor is re-seated on the replacement
// node before recursing so the (new) subtree is still visited.
for ( Node cursor = n . getFirstChild ( ) ; cursor != null ; cursor = cursor . getNext ( ) ) { if ( cursor . getType ( ) == Token . CALL ) { String hasCondition = NodeUtil . conditionFromHasNode ( cursor ) ; if ( hasCondition != null ) { if ( discoveredHasConditions != null ) { discoveredHasConditions . add ( hasCondition ) ; } Node newNode = null ; if ( features . contains ( hasCondition ) ) { // features contains the condition being tested . Replace the call // with a literal true / false newNode = new Node ( features . isFeature ( hasCondition ) ? Token . TRUE : Token . FALSE ) ; if ( log . isLoggable ( Level . FINEST ) ) log . finest ( "Replaced has call for \"" + hasCondition + "\" with " + Boolean . toString ( features . isFeature ( hasCondition ) ) ) ; // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $ } else if ( this . coerceUndefinedToFalse ) { // Not in features means false . newNode = new Node ( Token . FALSE ) ; if ( log . isLoggable ( Level . FINEST ) ) log . finest ( "Replaced has call for undefined \"" + hasCondition + "\" with false" ) ; // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $ } if ( newNode != null ) { cursor . getParent ( ) . replaceChild ( cursor , newNode ) ; cursor = newNode ; } } } if ( cursor . hasChildren ( ) ) processChildren ( cursor ) ; }
public class LazyIterate { /** * Creates a deferred flattening iterable for the specified iterable */ public static < T , V > LazyIterable < V > flatCollect ( Iterable < T > iterable , Function < ? super T , ? extends Iterable < V > > function ) { } }
return new FlatCollectIterable < T , V > ( iterable , function ) ;
public class OfflinerQueryHandler { /** * Save a result for offline access . * @ param url key . * @ param result value . */ public void put ( String url , String result ) { } }
if ( TextUtils . isEmpty ( url ) ) { return ; } ContentValues contentValues = new ContentValues ( ) ; contentValues . put ( OfflinerDBHelper . REQUEST_RESULT , result ) ; contentValues . put ( OfflinerDBHelper . REQUEST_URL , url ) ; contentValues . put ( OfflinerDBHelper . REQUEST_TIMESTAMP , Calendar . getInstance ( ) . getTime ( ) . getTime ( ) ) ; this . startQuery ( TOKEN_CHECK_SAVED_STATUS , contentValues , getUri ( OfflinerDBHelper . TABLE_CACHE ) , OfflinerDBHelper . PARAMS_CACHE , OfflinerDBHelper . REQUEST_URL + " = '" + url + "'" , null , null ) ;
public class ObjectBuilder { /** * Creates a MapBuilder around the passed instance * @ param instance * @ param < K > the key type * @ param < V > the value type * @ return a MapBuilder object for method chaining */ public static < K , V > MapBuilder < K , V > map ( Map < K , V > instance ) { } }
return new MapBuilder < > ( instance ) ;
public class ListUsersResult { /** * The list of users . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setUserList ( java . util . Collection ) } or { @ link # withUserList ( java . util . Collection ) } if you want to override * the existing values . * @ param userList * The list of users . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListUsersResult withUserList ( User ... userList ) { } }
if ( this . userList == null ) { setUserList ( new java . util . ArrayList < User > ( userList . length ) ) ; } for ( User ele : userList ) { this . userList . add ( ele ) ; } return this ;
public class RangeVariableResolver { /** * Assigns a set of conditions to a range variable . */ void assignToRangeVariable ( RangeVariable rangeVar , int rangeVarIndex , HsqlArrayList exprList , boolean isJoin ) { } }
// Assigns the conditions in exprList to rangeVar, trying to drive them through an index:
//  1. classifies each indexable expression's column into "equal" vs "other" sets, routing
//     single-column IN (ANY_QUANTIFIED) equalities to inExpressions when a matching index exists;
//  2. picks an index for the equality columns first, falling back to the "other" columns and,
//     for session-based tables, to indexes created on the exact column sets;
//  3. with a multi-column equality index, assembles the first-row expression array (truncating at
//     the first gap) and registers it as the index condition;
//  4. otherwise registers the first condition matching the index's leading column as the index
//     condition and adds everything else as an ordinary condition.
// Entries consumed from exprList are nulled out in place; order of processing matters throughout.
if ( exprList . isEmpty ( ) ) { return ; } colIndexSetEqual . clear ( ) ; colIndexSetOther . clear ( ) ; for ( int j = 0 , size = exprList . size ( ) ; j < size ; j ++ ) { Expression e = ( Expression ) exprList . get ( j ) ; if ( rangeVar . hasIndexCondition ( ) ) { rangeVar . addCondition ( e , isJoin ) ; exprList . set ( j , null ) ; continue ; } if ( e . getIndexableExpression ( rangeVar ) == null ) { rangeVar . addCondition ( e , isJoin ) ; exprList . set ( j , null ) ; continue ; } // can use index int type = e . getType ( ) ; switch ( type ) { default : { int colIndex = e . getLeftNode ( ) . getColumnIndex ( ) ; colIndexSetOther . add ( colIndex ) ; break ; } case OpTypes . EQUAL : if ( e . exprSubType == OpTypes . ANY_QUANTIFIED ) { Index index = rangeVar . rangeTable . getIndexForColumn ( e . getLeftNode ( ) . nodes [ 0 ] . getColumnIndex ( ) ) ; // code to disable IN optimisation // index = null ; if ( index != null && inExpressions [ rangeVarIndex ] == null ) { inExpressions [ rangeVarIndex ] = e ; inExpressionCount ++ ; } else { rangeVar . addCondition ( e , isJoin ) ; } exprList . set ( j , null ) ; continue ; } // $ FALL - THROUGH $ case OpTypes . IS_NULL : { int colIndex = e . getLeftNode ( ) . getColumnIndex ( ) ; colIndexSetEqual . add ( colIndex ) ; break ; } case OpTypes . NOT : { int colIndex = e . getLeftNode ( ) . getLeftNode ( ) . getColumnIndex ( ) ; colIndexSetOther . add ( colIndex ) ; break ; } } } boolean isEqual = true ; Index idx = rangeVar . rangeTable . getIndexForColumns ( colIndexSetEqual ) ; if ( idx == null ) { isEqual = false ; idx = rangeVar . rangeTable . getIndexForColumns ( colIndexSetOther ) ; } // different procedure for subquery tables if ( idx == null && rangeVar . rangeTable . isSessionBased ) { if ( ! colIndexSetEqual . isEmpty ( ) ) { int [ ] cols = colIndexSetEqual . toArray ( ) ; idx = rangeVar . rangeTable . getIndexForColumns ( cols ) ; } if ( idx == null && ! colIndexSetOther . 
isEmpty ( ) ) { int [ ] cols = colIndexSetOther . toArray ( ) ; idx = rangeVar . rangeTable . getIndexForColumns ( cols ) ; } } // no index found if ( idx == null ) { for ( int j = 0 , size = exprList . size ( ) ; j < size ; j ++ ) { Expression e = ( Expression ) exprList . get ( j ) ; if ( e != null ) { rangeVar . addCondition ( e , isJoin ) ; } } return ; } // index found int [ ] cols = idx . getColumns ( ) ; int colCount = cols . length ; if ( isEqual && colCount > 1 ) { Expression [ ] firstRowExpressions = new Expression [ cols . length ] ; for ( int j = 0 ; j < exprList . size ( ) ; j ++ ) { Expression e = ( Expression ) exprList . get ( j ) ; if ( e == null ) { continue ; } int type = e . getType ( ) ; if ( type == OpTypes . EQUAL ) { int offset = ArrayUtil . find ( cols , e . getLeftNode ( ) . getColumnIndex ( ) ) ; if ( offset != - 1 && firstRowExpressions [ offset ] == null ) { firstRowExpressions [ offset ] = e ; exprList . set ( j , null ) ; continue ; } } // not used in index lookup rangeVar . addCondition ( e , isJoin ) ; exprList . set ( j , null ) ; } boolean hasNull = false ; for ( int i = 0 ; i < firstRowExpressions . length ; i ++ ) { Expression e = firstRowExpressions [ i ] ; if ( e == null ) { if ( colCount == cols . length ) { colCount = i ; } hasNull = true ; continue ; } if ( hasNull ) { rangeVar . addCondition ( e , isJoin ) ; firstRowExpressions [ i ] = null ; } } rangeVar . addIndexCondition ( firstRowExpressions , idx , colCount , isJoin ) ; return ; } for ( int j = 0 ; j < exprList . size ( ) ; j ++ ) { Expression e = ( Expression ) exprList . get ( j ) ; if ( e == null ) { continue ; } if ( rangeVar . hasIndexCondition ( ) ) { rangeVar . addCondition ( e , isJoin ) ; exprList . set ( j , null ) ; continue ; } boolean isIndexed = false ; if ( e . getType ( ) == OpTypes . NOT && cols [ 0 ] == e . getLeftNode ( ) . getLeftNode ( ) . getColumnIndex ( ) ) { isIndexed = true ; } if ( cols [ 0 ] == e . getLeftNode ( ) . 
getColumnIndex ( ) ) { if ( e . getRightNode ( ) != null && ! e . getRightNode ( ) . isCorrelated ( ) ) { isIndexed = true ; } if ( e . getType ( ) == OpTypes . IS_NULL ) { isIndexed = true ; } } if ( isIndexed ) { rangeVar . addIndexCondition ( e , idx , isJoin ) ; } else { rangeVar . addCondition ( e , isJoin ) ; } exprList . set ( j , null ) ; }
public class ToStringOption { /** * Return a < code > ToStringOption < / code > instance with { @ link # upToClass } option set . * if the current instance is not { @ link # DEFAULT _ OPTION default instance } then set * on the current instance and return the current instance . Otherwise , clone the default * instance and set on the clone and return the clone * @ param c * @ return this option instance or clone if this is the { @ link # DEFAULT _ OPTION } */ public ToStringOption setUpToClass ( Class < ? > c ) { } }
ToStringOption op = this ; if ( this == DEFAULT_OPTION ) { op = new ToStringOption ( this . appendStatic , this . appendTransient ) ; } op . upToClass = c ; return op ;
public class RythmEngine { /** * Get an new template instance by template source { @ link java . io . File file } * and an array of arguments . * < p > When the args array contains only one element and is of { @ link java . util . Map } type * the the render args are passed to template * { @ link ITemplate # _ _ setRenderArgs ( java . util . Map ) by name } , * otherwise they passes to template instance by position < / p > * @ param file the template source file * @ param args the render args . See { @ link # getTemplate ( String , Object . . . ) } * @ return template instance */ @ SuppressWarnings ( "unchecked" ) public ITemplate getTemplate ( File file , Object ... args ) { } }
// Resolves (and caches) a template class for the given file. When type inference is enabled the
// render-arg types are registered first and a type-derived uuid is folded into the cache key, so
// the same file can map to differently-typed template classes. A cache miss creates a new
// TemplateClass, instantiates the template (null on failure), and stores it in _templates; the
// render args are applied before returning.
boolean typeInferenceEnabled = conf ( ) . typeInferenceEnabled ( ) ; if ( typeInferenceEnabled ) { ParamTypeInferencer . registerParams ( this , args ) ; } String key = S . str ( resourceManager ( ) . get ( file ) . getKey ( ) ) ; if ( typeInferenceEnabled ) { key += ParamTypeInferencer . uuid ( ) ; } TemplateClass tc = classes ( ) . getByTemplate ( key ) ; ITemplate t ; if ( null == tc ) { tc = new TemplateClass ( file , this ) ; t = tc . asTemplate ( this ) ; if ( null == t ) return null ; _templates . put ( tc . getKey ( ) , t ) ; // classes ( ) . add ( key , tc ) ; } else { t = tc . asTemplate ( this ) ; } setRenderArgs ( t , args ) ; return t ;
public class LinuxFile { /** * Runs an ioctl value command on a file descriptor . * @ param command ioctl command * @ param value int ioctl value * @ return result of operation . Zero if everything is OK , less than zero if there was an error . */ public void ioctl ( long command , int value ) throws IOException { } }
final int response = directIOCTL ( getFileDescriptor ( ) , command , value ) ; if ( response < 0 ) throw new LinuxFileException ( ) ;
public class EvaluatorImpl { /** * Perform logical ' and ' between BooleanValue instances */ public static Boolean and ( Boolean a , Boolean b ) { } }
return andTable [ ttIndex ( a ) ] [ ttIndex ( b ) ] ;
public class ClassifiedTBoxImpl { /** * constructs a ClassifiedTBox that has a reduced number of classes and properties in each equivalent class * - each object property equivalence class contains one property ( representative ) * except when the representative property is equivalent to its inverse , in which * case the equivalence class contains both the property and its inverse * - each data property equivalence class contains a single property ( representative ) * - each class equivalence class contains the representative and all domains / ranges * of the representatives of property equivalence classes * in other words , the constructed ClassifiedTBox is the restriction to the vocabulary of the representatives * all other symbols are mapped to the nodes via * Equivalences hash - maps * @ param reasoner * @ return reduced reasoner */ private static ClassifiedTBoxImpl getEquivalenceSimplifiedReasoner ( ClassifiedTBox reasoner ) { } }
// Builds a reduced ClassifiedTBox restricted to equivalence-class representatives:
//  - object properties: one vertex per equivalence class containing just the representative,
//    plus its inverse only when the representative is equivalent to its own inverse;
//  - data properties: a single representative per vertex;
//  - classes: the representative plus those existential restrictions whose property is itself
//    a representative in the already-reduced property DAGs (named non-representative classes
//    are dropped — see the commented-out classEquivalenceMap code).
// Each graph is then collapsed via EquivalencesDAGImpl.reduce, and the new ClassifiedTBoxImpl
// reuses the remaining components (disjointness, functionality, data ranges) unchanged.
// OBJECT PROPERTIES SimpleDirectedGraph < Equivalences < ObjectPropertyExpression > , DefaultEdge > objectProperties = new SimpleDirectedGraph < > ( DefaultEdge . class ) ; // classify vertices for properties for ( Equivalences < ObjectPropertyExpression > node : reasoner . objectPropertiesDAG ( ) ) { ObjectPropertyExpression rep = node . getRepresentative ( ) ; ObjectPropertyExpression repInv = rep . getInverse ( ) ; Equivalences < ObjectPropertyExpression > reducedNode ; if ( ! node . contains ( repInv ) ) reducedNode = new Equivalences < > ( ImmutableSet . of ( rep ) , rep , node . isIndexed ( ) ) ; else // the object property is equivalent to its inverse reducedNode = new Equivalences < > ( ImmutableSet . of ( rep , repInv ) , rep , node . isIndexed ( ) ) ; objectProperties . addVertex ( reducedNode ) ; } EquivalencesDAGImpl < ObjectPropertyExpression > objectPropertyDAG = EquivalencesDAGImpl . reduce ( ( EquivalencesDAGImpl < ObjectPropertyExpression > ) reasoner . objectPropertiesDAG ( ) , objectProperties ) ; // DATA PROPERTIES SimpleDirectedGraph < Equivalences < DataPropertyExpression > , DefaultEdge > dataProperties = new SimpleDirectedGraph < > ( DefaultEdge . class ) ; // classify vertices for properties for ( Equivalences < DataPropertyExpression > node : reasoner . dataPropertiesDAG ( ) ) { DataPropertyExpression rep = node . getRepresentative ( ) ; Equivalences < DataPropertyExpression > reducedNode = new Equivalences < > ( ImmutableSet . of ( rep ) , rep , node . isIndexed ( ) ) ; dataProperties . addVertex ( reducedNode ) ; } EquivalencesDAGImpl < DataPropertyExpression > dataPropertyDAG = EquivalencesDAGImpl . reduce ( ( EquivalencesDAGImpl < DataPropertyExpression > ) reasoner . dataPropertiesDAG ( ) , dataProperties ) ; // CLASSES SimpleDirectedGraph < Equivalences < ClassExpression > , DefaultEdge > classes = new SimpleDirectedGraph < > ( DefaultEdge . 
class ) ; // classify vertices for classes for ( Equivalences < ClassExpression > node : reasoner . classesDAG ( ) ) { ClassExpression rep = node . getRepresentative ( ) ; ImmutableSet . Builder < ClassExpression > reduced = new ImmutableSet . Builder < > ( ) ; for ( ClassExpression equi : node ) { if ( equi . equals ( rep ) ) { reduced . add ( equi ) ; } else if ( equi instanceof OClass ) { // an entry is created for a named class // OClass equiClass = ( OClass ) equi ; // classEquivalenceMap . put ( equiClass . getName ( ) , ( OClass ) rep ) ; } else if ( equi instanceof ObjectSomeValuesFrom ) { // the property of the existential is a representative of its equivalence class if ( objectPropertyDAG . getVertex ( ( ( ObjectSomeValuesFrom ) equi ) . getProperty ( ) ) != null ) reduced . add ( equi ) ; } else { // the property of the existential is a representative of its equivalence class if ( dataPropertyDAG . getVertex ( ( ( DataSomeValuesFrom ) equi ) . getProperty ( ) ) != null ) reduced . add ( equi ) ; } } Equivalences < ClassExpression > reducedNode = new Equivalences < > ( reduced . build ( ) , rep , node . isIndexed ( ) ) ; classes . addVertex ( reducedNode ) ; } EquivalencesDAGImpl < ClassExpression > classDAG = EquivalencesDAGImpl . reduce ( ( EquivalencesDAGImpl < ClassExpression > ) reasoner . classesDAG ( ) , classes ) ; // DATA RANGES // TODO : a proper implementation is in order here ClassifiedTBoxImpl impl = ( ClassifiedTBoxImpl ) reasoner ; return new ClassifiedTBoxImpl ( impl . classes , impl . objectProperties , impl . dataProperties , impl . annotationProperties , classDAG , objectPropertyDAG , dataPropertyDAG , impl . dataRangeDAG , impl . classDisjointness , impl . objectPropertyDisjointness , impl . dataPropertyDisjointness , impl . reflexiveObjectProperties , impl . irreflexiveObjectProperties , impl . functionalObjectProperties , impl . functionalDataProperties ) ;
public class Button { /** * ( non - Javadoc ) * @ see qc . automation . framework . widget . element . InteractiveElement # getLabel ( ) */ @ Override public String getLabel ( ) throws WidgetException { } }
try { return getText ( ) ; } catch ( Exception e ) { throw new WidgetException ( "Error while getting button text" , getByLocator ( ) , e ) ; }
public class HostAndPort {
    /**
     * Splits a {@code host[:port]} string into its host and port parts.
     * The port is optional; when no colon is present the port element is the empty string.
     * The split happens at the LAST colon, so {@code "a:b:c"} yields host {@code "a:b"}.
     *
     * @param from the string to parse
     * @return a two-element array {host, port}; port may be ""
     */
    public static String[] extractParts(String from) {
        final int sep = from.lastIndexOf(':');
        if (sep == -1) {
            // No separator: the whole input is the host, port is absent.
            return new String[] { from, "" };
        }
        return new String[] { from.substring(0, sep), from.substring(sep + 1) };
    }
}
public class AtlasClientV2 { /** * / * Entity Calls */ public AtlasEntityWithExtInfo getEntityByGuid ( String guid ) throws AtlasServiceException { } }
return callAPI ( GET_ENTITY_BY_GUID , AtlasEntityWithExtInfo . class , ( MultivaluedMap < String , String > ) null , guid ) ;
public class ChangeObjects {
    /**
     * Renders a MonomerNotation as its HELM string form.
     * Multi-character ids are bracketed; a count other than 1 (or any non-numeric count)
     * is appended in single quotes; a non-null annotation is appended in double quotes.
     *
     * @param id id of the MonomerNotation
     * @param count count of the MonomerNotation
     * @param annotation annotation of the MonomerNotation, may be null
     * @return the MonomerNotation in String format
     */
    private final static String generateIDMonomerNotation(String id, String count, String annotation) {
        StringBuilder notation = new StringBuilder(id.length() > 1 ? "[" + id + "]" : id);
        boolean emitCount;
        try {
            // A numeric count of exactly 1 is the default and is omitted.
            emitCount = Integer.parseInt(count) != 1;
        } catch (NumberFormatException e) {
            // Non-numeric counts (e.g. variables) are always emitted.
            emitCount = true;
        }
        if (emitCount) {
            notation.append('\'').append(count).append('\'');
        }
        if (annotation != null) {
            notation.append('"').append(annotation).append('"');
        }
        return notation.toString();
    }
}
public class InternalXbaseParser { /** * InternalXbase . g : 642:1 : ruleXMemberFeatureCall : ( ( rule _ _ XMemberFeatureCall _ _ Group _ _ 0 ) ) ; */ public final void ruleXMemberFeatureCall ( ) throws RecognitionException { } }
// ANTLR-generated rule method (do not hand-edit; regenerate from InternalXbase.g instead).
// Saves the stack size, announces the XMemberFeatureCall group to the listener when not
// backtracking, delegates to rule__XMemberFeatureCall__Group__0, and reports/recovers from
// recognition errors before restoring the stack size in the finally block.
int stackSize = keepStackSize ( ) ; try { // InternalXbase . g : 646:2 : ( ( ( rule _ _ XMemberFeatureCall _ _ Group _ _ 0 ) ) ) // InternalXbase . g : 647:2 : ( ( rule _ _ XMemberFeatureCall _ _ Group _ _ 0 ) ) { // InternalXbase . g : 647:2 : ( ( rule _ _ XMemberFeatureCall _ _ Group _ _ 0 ) ) // InternalXbase . g : 648:3 : ( rule _ _ XMemberFeatureCall _ _ Group _ _ 0 ) { if ( state . backtracking == 0 ) { before ( grammarAccess . getXMemberFeatureCallAccess ( ) . getGroup ( ) ) ; } // InternalXbase . g : 649:3 : ( rule _ _ XMemberFeatureCall _ _ Group _ _ 0 ) // InternalXbase . g : 649:4 : rule _ _ XMemberFeatureCall _ _ Group _ _ 0 { pushFollow ( FOLLOW_2 ) ; rule__XMemberFeatureCall__Group__0 ( ) ; state . _fsp -- ; if ( state . failed ) return ; } if ( state . backtracking == 0 ) { after ( grammarAccess . getXMemberFeatureCallAccess ( ) . getGroup ( ) ) ; } } } } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; } finally { restoreStackSize ( stackSize ) ; } return ;
public class RecurringData { /** * Calculate start dates for a yearly recurrence . * @ param calendar current date * @ param dates array of start dates */ private void getYearlyDates ( Calendar calendar , List < Date > dates ) { } }
if ( m_relative ) { getYearlyRelativeDates ( calendar , dates ) ; } else { getYearlyAbsoluteDates ( calendar , dates ) ; }
public class MatrixFeatures_ZDRM { /** * Checks to see if a matrix is lower triangular or Hessenberg . A Hessenberg matrix of degree N * has the following property : < br > * < br > * a < sub > ij < / sub > & le ; 0 for all i & lt ; j + N < br > * < br > * A triangular matrix is a Hessenberg matrix of degree 0. * @ param A Matrix being tested . Not modified . * @ param hessenberg The degree of being hessenberg . * @ param tol How close to zero the lower left elements need to be . * @ return If it is an upper triangular / hessenberg matrix or not . */ public static boolean isLowerTriangle ( ZMatrixRMaj A , int hessenberg , double tol ) { } }
tol *= tol ; for ( int i = 0 ; i < A . numRows - hessenberg - 1 ; i ++ ) { for ( int j = i + hessenberg + 1 ; j < A . numCols ; j ++ ) { int index = ( i * A . numCols + j ) * 2 ; double real = A . data [ index ] ; double imag = A . data [ index + 1 ] ; double mag = real * real + imag * imag ; if ( ! ( mag <= tol ) ) { return false ; } } } return true ;
public class StormConfigGenerator { /** * Read yaml config object from the yaml file at specified file . * @ param targetFile target file * @ return config read from yaml * @ throws IOException Fail read yaml file or convert to config object . */ @ SuppressWarnings ( "unchecked" ) public static Map < String , Object > readYaml ( File targetFile ) throws IOException { } }
Map < String , Object > configObject = null ; Yaml yaml = new Yaml ( ) ; InputStream inputStream = null ; InputStreamReader steamReader = null ; try { inputStream = new FileInputStream ( targetFile ) ; steamReader = new InputStreamReader ( inputStream , "UTF-8" ) ; configObject = ( Map < String , Object > ) yaml . load ( steamReader ) ; } catch ( ScannerException ex ) { // ScannerException / IOException are occured . // throw IOException because handling is same . throw new IOException ( ex ) ; } finally { IOUtils . closeQuietly ( inputStream ) ; } return configObject ;
public class MergingWindowSet { /** * Removes the given window from the set of in - flight windows . * @ param window The { @ code Window } to remove . */ public void retireWindow ( W window ) { } }
W removed = this . mapping . remove ( window ) ; if ( removed == null ) { throw new IllegalStateException ( "Window " + window + " is not in in-flight window set." ) ; }
public class HeartbeatImpl { /** * Update a server with a heartbeat update . * @ param server the server to be updated * @ param update the new update information */ void updateServer ( ServerHeartbeat server , UpdateServerHeartbeat update ) { } }
if ( server . isSelf ( ) ) { return ; } String externalId = update . getExternalId ( ) ; updateExternal ( server , externalId ) ; // XXX : validation server . setSeedIndex ( update . getSeedIndex ( ) ) ; if ( server . onHeartbeatUpdate ( update ) ) { if ( server . isUp ( ) ) { onServerStart ( server ) ; } else { onServerStop ( server ) ; } }
public class CurrencyUnitBuilder { /** * Returns a new instance of { @ link BuildableCurrencyUnit } and publishes it so it is * accessible from the { @ code Monetary } singleton . * @ param register if { @ code true } the instance created is published so it is accessible from * the { @ code Monetary } singleton . * @ param locale country Locale for making the currency for the given country . * @ return the new CurrencyUnit instance . * @ see javax . money . Monetary # getCurrency ( String , String . . . ) * @ see javax . money . Monetary # getCurrency ( java . util . Locale , String . . . ) */ public CurrencyUnit build ( boolean register , Locale locale ) { } }
BuildableCurrencyUnit cu = new BuildableCurrencyUnit ( this ) ; if ( register ) { ConfigurableCurrencyUnitProvider . registerCurrencyUnit ( cu ) ; ConfigurableCurrencyUnitProvider . registerCurrencyUnit ( cu , locale ) ; } return cu ;
public class Packer { /** * Add gridx = RELATIVE to the constraints for the current component if how = = * true 0 it if false . */ public Packer setXLeftRelative ( final boolean how ) { } }
if ( how == true ) { gc . gridx = GridBagConstraints . RELATIVE ; } else { gc . gridx = 0 ; } setConstraints ( comp , gc ) ; return this ;
public class StreamUtils { /** * Copy the contents of the given byte array to the given OutputStream . * Leaves the stream open when done . * @ param in the byte array to copy from * @ param out the OutputStream to copy to * @ throws IOException in case of I / O errors */ public static void copy ( byte [ ] in , OutputStream out ) throws IOException { } }
Assert . notNull ( in , "No input byte array specified" ) ; Assert . notNull ( out , "No OutputStream specified" ) ; out . write ( in ) ;
public class AbstractTable { /** * Gets the selected item id or in multiselect mode the selected ids . * @ param table * the table to retrieve the selected ID ( s ) * @ return the ID ( s ) which are selected in the table */ @ SuppressWarnings ( "unchecked" ) public static < T > Set < T > getTableValue ( final Table table ) { } }
final Object value = table . getValue ( ) ; Set < T > idsReturn ; if ( value == null ) { idsReturn = Collections . emptySet ( ) ; } else if ( value instanceof Collection ) { final Collection < T > ids = ( Collection < T > ) value ; idsReturn = ids . stream ( ) . filter ( Objects :: nonNull ) . collect ( Collectors . toSet ( ) ) ; } else { final T id = ( T ) value ; idsReturn = Collections . singleton ( id ) ; } return idsReturn ;