signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class Pipe { /** * Stop piping data from input to output . */
public synchronized void stop ( ) { } } | if ( null != m_pump ) { // disconnect pump
m_pump . connect ( null ) ; m_pump . setName ( m_pump . getName ( ) + " (disconnected)" ) ; m_pump = null ; } |
public class SSLHelper { /** * Adds an SSL context to the HttpClient . No trust or client certificate is
* established and a trust - all policy is assumed .
* @ param client the HttpClient
* @ param port SSL port
* @ param server the LibertyServer */
public static void establishSSLContext ( HttpClient client , int port , LibertyServer server ) { } } | establishSSLContext ( client , port , server , null , null , null , null , "TLSv1.2" ) ; |
public class AuthAPI { /** * Request a password reset for the given email and database connection . The response will always be successful even if
* there ' s no user associated to the given email for that database connection .
* i . e . :
* < pre >
* { @ code
* AuthAPI auth = new AuthAPI ( " me . auth0 . com " , " B3c6RYhk1v9SbIJcRIOwu62gIUGsnze " , " 2679NfkaBn62e6w5E8zNEzjr - yWfkaBne " ) ;
* try {
* auth . resetPassword ( " me @ auth0 . com " , " db - connection " ) . execute ( ) ;
* } catch ( Auth0Exception e ) {
* / / Something happened
* < / pre >
* @ param email the email associated to the database user .
* @ param connection the database connection where the user was created .
* @ return a Request to execute . */
public Request resetPassword ( String email , String connection ) { } } | Asserts . assertNotNull ( email , "email" ) ; Asserts . assertNotNull ( connection , "connection" ) ; String url = baseUrl . newBuilder ( ) . addPathSegment ( PATH_DBCONNECTIONS ) . addPathSegment ( "change_password" ) . build ( ) . toString ( ) ; VoidRequest request = new VoidRequest ( client , url , "POST" ) ; request . addParameter ( KEY_CLIENT_ID , clientId ) ; request . addParameter ( KEY_EMAIL , email ) ; request . addParameter ( KEY_CONNECTION , connection ) ; return request ; |
public class GenerateReferenceParetoSetAndFrontFromDoubleSolutions { /** * The run ( ) method creates de output directory and compute the fronts */
@ Override public void run ( ) throws IOException { } } | String outputDirectoryName = experiment . getReferenceFrontDirectory ( ) ; createOutputDirectory ( outputDirectoryName ) ; List < String > referenceFrontFileNames = new LinkedList < > ( ) ; for ( ExperimentProblem < ? > problem : experiment . getProblemList ( ) ) { List < DoubleSolution > nonDominatedSolutions = getNonDominatedSolutions ( problem ) ; referenceFrontFileNames . add ( problem . getReferenceFront ( ) ) ; writeReferenceFrontFile ( outputDirectoryName , problem , nonDominatedSolutions ) ; writeReferenceSetFile ( outputDirectoryName , problem , nonDominatedSolutions ) ; writeFilesWithTheSolutionsContributedByEachAlgorithm ( outputDirectoryName , problem , nonDominatedSolutions ) ; } |
public class DeleteHandlerV1 { /** * Get the GUIDs and vertices for all composite entities owned / contained by the specified root entity AtlasVertex .
* The graph is traversed from the root entity through to the leaf nodes of the containment graph .
* @ param entityVertex the root entity vertex
* @ return set of VertexInfo for all composite entities
* @ throws AtlasException */
public Set < GraphHelper . VertexInfo > getOwnedVertices ( AtlasVertex entityVertex ) throws AtlasBaseException { } } | Set < GraphHelper . VertexInfo > result = new LinkedHashSet < > ( ) ; Stack < AtlasVertex > vertices = new Stack < > ( ) ; vertices . push ( entityVertex ) ; while ( vertices . size ( ) > 0 ) { AtlasVertex vertex = vertices . pop ( ) ; AtlasEntity . Status state = AtlasGraphUtilsV1 . getState ( vertex ) ; if ( state == AtlasEntity . Status . DELETED ) { // If the reference vertex is marked for deletion , skip it
continue ; } String typeName = GraphHelper . getTypeName ( vertex ) ; String guid = GraphHelper . getGuid ( vertex ) ; result . add ( new GraphHelper . VertexInfo ( guid , vertex , typeName ) ) ; AtlasEntityType entityType = typeRegistry . getEntityTypeByName ( typeName ) ; if ( entityType == null ) { throw new AtlasBaseException ( AtlasErrorCode . TYPE_NAME_INVALID , TypeCategory . ENTITY . name ( ) , typeName ) ; } for ( AtlasStructType . AtlasAttribute attributeInfo : entityType . getAllAttributes ( ) . values ( ) ) { if ( ! attributeInfo . isOwnedRef ( ) ) { continue ; } String edgeLabel = AtlasGraphUtilsV1 . getAttributeEdgeLabel ( entityType , attributeInfo . getName ( ) ) ; AtlasType attrType = attributeInfo . getAttributeType ( ) ; switch ( attrType . getTypeCategory ( ) ) { case OBJECT_ID_TYPE : AtlasEdge edge = graphHelper . getEdgeForLabel ( vertex , edgeLabel ) ; if ( edge != null && AtlasGraphUtilsV1 . getState ( edge ) == AtlasEntity . Status . ACTIVE ) { AtlasVertex compositeVertex = edge . getInVertex ( ) ; vertices . push ( compositeVertex ) ; } break ; case ARRAY : AtlasArrayType arrType = ( AtlasArrayType ) attrType ; if ( arrType . getElementType ( ) . getTypeCategory ( ) != TypeCategory . OBJECT_ID_TYPE ) { continue ; } Iterator < AtlasEdge > edges = graphHelper . getOutGoingEdgesByLabel ( vertex , edgeLabel ) ; if ( edges != null ) { while ( edges . hasNext ( ) ) { edge = edges . next ( ) ; if ( edge != null && AtlasGraphUtilsV1 . getState ( edge ) == AtlasEntity . Status . ACTIVE ) { AtlasVertex compositeVertex = edge . getInVertex ( ) ; vertices . push ( compositeVertex ) ; } } } break ; case MAP : AtlasMapType mapType = ( AtlasMapType ) attrType ; TypeCategory valueTypeCategory = mapType . getValueType ( ) . getTypeCategory ( ) ; if ( valueTypeCategory != TypeCategory . OBJECT_ID_TYPE ) { continue ; } String propertyName = AtlasGraphUtilsV1 . getQualifiedAttributePropertyKey ( entityType , attributeInfo . 
getName ( ) ) ; List < String > keys = vertex . getProperty ( propertyName , List . class ) ; if ( keys != null ) { for ( String key : keys ) { String mapEdgeLabel = GraphHelper . getQualifiedNameForMapKey ( edgeLabel , key ) ; edge = graphHelper . getEdgeForLabel ( vertex , mapEdgeLabel ) ; if ( edge != null && AtlasGraphUtilsV1 . getState ( edge ) == AtlasEntity . Status . ACTIVE ) { AtlasVertex compositeVertex = edge . getInVertex ( ) ; vertices . push ( compositeVertex ) ; } } } break ; default : } } } return result ; |
public class InboundNatRulesInner { /** * Creates or updates a load balancer inbound nat rule .
* @ param resourceGroupName The name of the resource group .
* @ param loadBalancerName The name of the load balancer .
* @ param inboundNatRuleName The name of the inbound nat rule .
* @ param inboundNatRuleParameters Parameters supplied to the create or update inbound nat rule operation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the InboundNatRuleInner object if successful . */
public InboundNatRuleInner beginCreateOrUpdate ( String resourceGroupName , String loadBalancerName , String inboundNatRuleName , InboundNatRuleInner inboundNatRuleParameters ) { } } | return beginCreateOrUpdateWithServiceResponseAsync ( resourceGroupName , loadBalancerName , inboundNatRuleName , inboundNatRuleParameters ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class ConstantIntegerInfo { /** * Will return either a new ConstantIntegerInfo object or one already in
* the constant pool . If it is a new ConstantIntegerInfo , it will be
* inserted into the pool . */
static ConstantIntegerInfo make ( ConstantPool cp , int value ) { } } | ConstantInfo ci = new ConstantIntegerInfo ( value ) ; return ( ConstantIntegerInfo ) cp . addConstant ( ci ) ; |
public class JMessageClient { /** * Add or remove members from a group
* @ param gid The group id
* @ param addList If this parameter is null then send remove request
* @ param removeList If this parameter is null then send add request
* @ throws APIConnectionException connect exception
* @ throws APIRequestException request exception */
public void addOrRemoveMembers ( long gid , String [ ] addList , String [ ] removeList ) throws APIConnectionException , APIRequestException { } } | Members add = Members . newBuilder ( ) . addMember ( addList ) . build ( ) ; Members remove = Members . newBuilder ( ) . addMember ( removeList ) . build ( ) ; _groupClient . addOrRemoveMembers ( gid , add , remove ) ; |
public class SVMLightParser { /** * Try to parse the bytes as svm light format , return a ParseSetupHandler with type
* SVMLight if the input is in svm light format , throw an exception otherwise . */
public static ParseSetup guessSetup ( byte [ ] bits ) { } } | int lastNewline = bits . length - 1 ; while ( lastNewline > 0 && ! CsvParser . isEOL ( bits [ lastNewline ] ) ) lastNewline -- ; if ( lastNewline > 0 ) bits = Arrays . copyOf ( bits , lastNewline + 1 ) ; SVMLightParser p = new SVMLightParser ( new ParseSetup ( SVMLight_INFO , ParseSetup . GUESS_SEP , false , ParseSetup . GUESS_HEADER , ParseSetup . GUESS_COL_CNT , null , null , null , null , null ) , null ) ; SVMLightInspectParseWriter dout = new SVMLightInspectParseWriter ( ) ; p . parseChunk ( 0 , new ByteAryData ( bits , 0 ) , dout ) ; if ( dout . _ncols > 0 && dout . _nlines > 0 && dout . _nlines > dout . _invalidLines ) return new ParseSetup ( SVMLight_INFO , ParseSetup . GUESS_SEP , false , ParseSetup . NO_HEADER , dout . _ncols , null , dout . guessTypes ( ) , null , null , dout . _data , dout . removeErrors ( ) ) ; else throw new ParseDataset . H2OParseException ( "Could not parse file as an SVMLight file." ) ; |
public class SourceStream { /** * This is used to set the sendWindow defined in Admin panels */
public synchronized void setDefinedSendWindow ( long newSendWindow ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "setDefinedSendWindow" , Long . valueOf ( newSendWindow ) ) ; definedSendWindow = newSendWindow ; // PK41355 - Commenting out - should not be sending stuff at reconstitute
// updateAndPersistSendWindow ( ) ;
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "setDefinedSendWindow" , Long . valueOf ( newSendWindow ) ) ; |
public class AbstractRadial { /** * Enables / disables the usage of a transparent color for filling areas
* @ param TRANSPARENT _ AREAS _ ENABLED */
public void setTransparentAreasEnabled ( final boolean TRANSPARENT_AREAS_ENABLED ) { } } | transparentAreasEnabled = TRANSPARENT_AREAS_ENABLED ; init ( getInnerBounds ( ) . width , getInnerBounds ( ) . height ) ; repaint ( getInnerBounds ( ) ) ; |
public class KafkaPusher { /** * Actually creates the Kafka producer . */
protected ProducerCloseable < String , byte [ ] > createProducer ( ProducerConfig config ) { } } | return this . closer . register ( new ProducerCloseable < String , byte [ ] > ( config ) ) ; |
public class InboundTransmissionParser { /** * Invoked to parse a segmented transmission middle payload from the supplied buffer .
* May be invoked multiple times to incrementally parse the structure .
* Once the structure has been fully parsed , transitions the state machine
* into the appropriate next state based on the layout of the transmission .
* @ param contextBuffer */
private void parseSegmentMiddlePayload ( WsByteBuffer contextBuffer ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "parseSegmentMiddlePayload" , contextBuffer ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) JFapUtils . debugTraceWsByteBufferInfo ( this , tc , contextBuffer , "contextBuffer" ) ; WsByteBuffer partialTransmission = inFlightSegmentedTransmissions [ primaryHeaderFields . priority ] ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( this , tc , "partial transmission in slot " + primaryHeaderFields . priority + " = " + partialTransmission ) ; int contextBufferRemaining = contextBuffer . remaining ( ) ; if ( partialTransmission == null ) { throwable = new SIConnectionLostException ( nls . getFormattedMessage ( "TRANSPARSER_PROTOCOLERROR_SICJ0053" , new Object [ ] { connection . remoteHostAddress , connection . chainName } , "TRANSPARSER_PROTOCOLERROR_SICJ0053" ) ) ; // D226223
// This FFDC was generated because our peer sent the middle segment of a segmented
// transmission without first sending the first segment .
FFDCFilter . processException ( throwable , "com.ibm.ws.sib.jfapchannel.impl.InboundTransmissionParser" , JFapChannelConstants . INBOUNDXMITPARSER_PARSESMPAYLOAD_01 , getFormattedBytes ( contextBuffer ) ) ; // D267629
state = STATE_ERROR ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( this , tc , "Received the middle segment of a segmented transmission prior to receiving a start segment." ) ; } else if ( partialTransmission . remaining ( ) < transmissionPayloadRemaining ) { throwable = new SIConnectionLostException ( nls . getFormattedMessage ( "TRANSPARSER_PROTOCOLERROR_SICJ0053" , new Object [ ] { connection . remoteHostAddress , connection . chainName } , "TRANSPARSER_PROTOCOLERROR_SICJ0053" ) ) ; // D226223
// This FFDC was generated because our peer sent a middle segment of a segmented
// transmission that would have made the overall transmitted data length greater
// than that suggested in the first segment .
FFDCFilter . processException ( throwable , "com.ibm.ws.sib.jfapchannel.impl.InboundTransmissionParser" , JFapChannelConstants . INBOUNDXMITPARSER_PARSESMPAYLOAD_02 , getFormattedBytes ( contextBuffer ) ) ; // D267629
state = STATE_ERROR ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( this , tc , "Received a middle segment for a segmented transmission which makes the transmission larger than the peer indicated in the first segment." ) ; } else { int amountCopied = JFapUtils . copyWsByteBuffer ( contextBuffer , partialTransmission , transmissionPayloadRemaining ) ; transmissionPayloadRemaining -= amountCopied ; // begin F193735.3
if ( type == Conversation . ME ) meReadBytes -= amountCopied ; else if ( type == Conversation . CLIENT ) clientReadBytes -= amountCopied ; // end F193735.3
needMoreData = ( amountCopied == contextBufferRemaining ) ; if ( ! needMoreData ) reset ( ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "parseSegmentMiddlePayload" ) ; |
public class RedisClusterManagerFactory { /** * 根据配置构建一个RedissonClient
* @ param redisBaseConfig 配置文件
* @ return 构建的RedissonClient */
private static RedissonClient buildRedissonClient ( RedisBaseConfig redisBaseConfig ) { } } | Config config = new Config ( ) ; if ( redisBaseConfig instanceof RedisSingleServerConfig ) { RedisSingleServerConfig redisSingleServerConfig = ( RedisSingleServerConfig ) redisBaseConfig ; SingleServerConfig singleServerConfig = config . useSingleServer ( ) ; BeanUtils . copy ( singleServerConfig , redisSingleServerConfig ) ; // 该字段write方法和get方法类型不一致 , 需要手动处理
singleServerConfig . setAddress ( redisSingleServerConfig . getAddress ( ) ) ; } else { throw new IllegalArgumentException ( "位置的配置类型:" + redisBaseConfig . getClass ( ) ) ; } return Redisson . create ( config ) ; |
public class ModbusMaster { /** * This function code is used to write a block of contiguous registers ( 1 to 123 registers ) in a
* remote device .
* The requested written values are specified in the request data field . Data is packed as two
* bytes per register .
* @ param serverAddress a slave address
* @ param startAddress the address of the registers to be written
* @ param registers the register data
* @ throws ModbusProtocolException if modbus - exception is received
* @ throws ModbusNumberException if response is invalid
* @ throws ModbusIOException if remote slave is unavailable */
final public void writeMultipleRegisters ( int serverAddress , int startAddress , int [ ] registers ) throws ModbusProtocolException , ModbusNumberException , ModbusIOException { } } | processRequest ( ModbusRequestBuilder . getInstance ( ) . buildWriteMultipleRegisters ( serverAddress , startAddress , registers ) ) ; |
public class RestRequestHandler { /** * { @ inheritDoc } */
@ Override @ GET @ Path ( "/responses" ) public Collection < Response > getResponses ( @ QueryParam ( "earliest" ) String earliest ) throws NotAuthorizedException { } } | accessControlUtils . checkAuthorization ( Action . GET_RESPONSES , requestContext ) ; SearchCriteria criteria = new SearchCriteria ( ) . setDetectionSystemIds ( StringUtils . toCollection ( getClientApplicationName ( ) ) ) . setEarliest ( earliest ) ; return appSensorServer . getResponseStore ( ) . findResponses ( criteria ) ; |
public class SVGRenderer { private void writeHeader ( ) { } } | out . println ( "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"no\"?>" ) ; out . println ( "<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 20010904//EN\" \"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd\">" ) ; out . println ( "<!-- Rendered by CSSBox http://cssbox.sourceforge.net -->" ) ; out . println ( "<svg xmlns=\"http://www.w3.org/2000/svg\"" ) ; out . println ( " xmlns:xlink=\"http://www.w3.org/1999/xlink\" xml:space=\"preserve\"" ) ; out . println ( " width=\"" + rootw + "\" height=\"" + rooth + "px\"" ) ; out . println ( " viewBox=\"0 0 " + rootw + " " + rooth + "\"" ) ; out . println ( " zoomAndPan=\"disable\" >" ) ; |
public class MultiAssetResponsiveDisplayAd { /** * Gets the squareMarketingImages value for this MultiAssetResponsiveDisplayAd .
* @ return squareMarketingImages * Square marketing image to be used in the ad . This image may
* be used when a square aspect ratio
* is more appropriate than the aspect ratio of the
* { @ link # marketingImage } image . This ad format
* does not allow the creation of an imageAsset using
* the ImageAsset . imageData field . An
* imageAsset must first be created using the AssetService ,
* and ImageAsset . assetId must be
* populated in this field . Valid image types are
* GIF , JPEG , and PNG . The minimum size is 300x300
* the aspect ratio must be 1:1 ( + - 1 % ) . One squareMarketingImage
* is required . Support up to 15
* with marketingImages .
* < span class = " constraint Selectable " > This field
* can be selected using the value " MultiAssetResponsiveDisplayAdSquareMarketingImages " . < / span >
* < span class = " constraint Required " > This field is required and should
* not be { @ code null } when it is contained within { @ link Operator } s
* : ADD . < / span > */
public com . google . api . ads . adwords . axis . v201809 . cm . AssetLink [ ] getSquareMarketingImages ( ) { } } | return squareMarketingImages ; |
public class JMXContext { /** * Get the { @ link MemoryPoolMXBean } with one of the given names . The first
* matching memory pool is returned . If no memory pool exists for the given
* names , then < code > null < / code > is returned .
* @ param names The names of the memory pools to search
* @ return The associated memory pool or < code > null < / code >
* @ see # getMemoryPoolMXBeans ( ) */
public MemoryPoolMXBean getMemoryPoolMXBean ( String [ ] names ) { } } | for ( String name : names ) { MemoryPoolMXBean bean = getMemoryPoolMXBean ( name ) ; if ( bean != null ) { return bean ; } } return null ; |
public class RtfDestinationMgr { /** * Adds a < CODE > RtfDestinationListener < / CODE > to the appropriate < CODE > RtfDestination < / CODE > .
* @ param destination the destination string for the listener
* @ param listener
* the new RtfDestinationListener . */
public static boolean addListener ( String destination , RtfDestinationListener listener ) { } } | RtfDestination dest = getDestination ( destination ) ; if ( dest != null ) { return dest . addListener ( listener ) ; } return false ; |
public class ShowcaseAd { /** * Get Custom Deserializer */
public static org . apache . axis . encoding . Deserializer getDeserializer ( java . lang . String mechType , java . lang . Class _javaType , javax . xml . namespace . QName _xmlType ) { } } | return new org . apache . axis . encoding . ser . BeanDeserializer ( _javaType , _xmlType , typeDesc ) ; |
public class ContainerDefinition { /** * The secrets to pass to the container . For more information , see < a
* href = " http : / / docs . aws . amazon . com / AmazonECS / latest / developerguide / specifying - sensitive - data . html " > Specifying
* Sensitive Data < / a > in the < i > Amazon Elastic Container Service Developer Guide < / i > .
* @ return The secrets to pass to the container . For more information , see < a
* href = " http : / / docs . aws . amazon . com / AmazonECS / latest / developerguide / specifying - sensitive - data . html "
* > Specifying Sensitive Data < / a > in the < i > Amazon Elastic Container Service Developer Guide < / i > . */
public java . util . List < Secret > getSecrets ( ) { } } | if ( secrets == null ) { secrets = new com . amazonaws . internal . SdkInternalList < Secret > ( ) ; } return secrets ; |
public class FolderJob { /** * Get a list of all the defined jobs in this folder
* @ return list of defined jobs ( summary level , for details { @ link Job # details ( ) } . */
public Map < String , Job > getJobs ( ) { } } | // FIXME : Check for null of jobs ? Can that happen ?
return jobs . stream ( ) . map ( SET_CLIENT ( this . client ) ) . collect ( Collectors . toMap ( k -> k . getName ( ) , Function . identity ( ) ) ) ; |
public class DefaultUrlCreator { /** * Appends a URL token to the buffer */
private void appendUrlToken ( FastStringWriter actualUriBuf , Object token , String charset ) { } } | actualUriBuf . append ( SLASH ) . append ( urlEncode ( token , charset ) ) ; |
public class DataClient { /** * shorthand for getTable
* @ param name
* @ return */
@ CallerSideMethod public RealLiveTable tbl ( String name ) { } } | return ( RealLiveTable ) getActor ( ) . syncTableAccess . get ( name ) ; |
public class Vector { /** * Parses { @ link Vector } from the given Matrix Market .
* @ param is the input stream in Matrix Market format
* @ return a parsed vector
* @ exception IOException if an I / O error occurs . */
public static Vector fromMatrixMarket ( InputStream is ) throws IOException { } } | StreamTokenizer tokenizer = new StreamTokenizer ( new BufferedReader ( new InputStreamReader ( is ) ) ) ; tokenizer . wordChars ( '%' , '%' ) ; tokenizer . nextToken ( ) ; if ( ! "%%MatrixMarket" . equals ( tokenizer . sval ) ) { throw new IllegalArgumentException ( "Wrong input file format: can not read header '%%MatrixMarket'." ) ; } tokenizer . nextToken ( ) ; String object = tokenizer . sval ; if ( ! "vector" . equals ( object ) ) { throw new IllegalArgumentException ( "Unexpected object: " + object + "." ) ; } tokenizer . nextToken ( ) ; String format = tokenizer . sval ; if ( ! "coordinate" . equals ( format ) && ! "array" . equals ( format ) ) { throw new IllegalArgumentException ( "Unknown format: " + format + "." ) ; } tokenizer . nextToken ( ) ; String field = tokenizer . sval ; if ( ! "real" . equals ( field ) ) { throw new IllegalArgumentException ( "Unknown field type: " + field + "." ) ; } tokenizer . nextToken ( ) ; int length = ( int ) Math . round ( tokenizer . nval ) ; if ( "coordinate" . equals ( format ) ) { tokenizer . nextToken ( ) ; int cardinality = ( int ) Math . round ( tokenizer . nval ) ; Vector result = SparseVector . zero ( length , cardinality ) ; for ( int k = 0 ; k < cardinality ; k ++ ) { tokenizer . nextToken ( ) ; int i = ( int ) Math . round ( tokenizer . nval ) ; tokenizer . nextToken ( ) ; double x = tokenizer . nval ; result . set ( i - 1 , x ) ; } return result ; } else { Vector result = DenseVector . zero ( length ) ; for ( int i = 0 ; i < length ; i ++ ) { tokenizer . nextToken ( ) ; result . set ( i , tokenizer . nval ) ; } return result ; } |
public class BackupManagerImpl { /** * { @ inheritDoc } */
public JobWorkspaceRestore getLastRestore ( String repositoryName , String workspaceName ) { } } | for ( int i = restoreJobs . size ( ) - 1 ; i >= 0 ; i -- ) { JobWorkspaceRestore job = restoreJobs . get ( i ) ; if ( repositoryName . equals ( job . getRepositoryName ( ) ) && workspaceName . equals ( job . getWorkspaceName ( ) ) ) { return job ; } } return null ; |
public class FastDateFormat { /** * < p > Formats a millisecond { @ code long } value into the
* supplied { @ code StringBuffer } . < / p >
* @ param millis the millisecond value to format
* @ param buf the buffer to format into
* @ return the specified string buffer
* @ since 2.1
* @ deprecated Use { { @ link # format ( long , Appendable ) } . */
@ Deprecated @ Override public StringBuffer format ( final long millis , final StringBuffer buf ) { } } | return printer . format ( millis , buf ) ; |
public class DateCaster { /** * reads a offset definition at the end of a date string
* @ param timeZone
* @ param dt previous parsed date Object
* @ param ds DateString to parse
* @ param defaultValue
* @ return date Object with offset */
private static DateTime readOffset ( boolean isPlus , TimeZone timeZone , DateTime dt , int years , int months , int days , int hours , int minutes , int seconds , int milliSeconds , DateString ds , boolean checkAfterLast , DateTime defaultValue ) { } } | // timeZone = ThreadLocalPageContext . getTimeZone ( timeZone ) ;
if ( timeZone == null ) return defaultValue ; // HOUR
int hourLength = ds . getPos ( ) ; int hour = ds . readDigits ( ) ; hourLength = ds . getPos ( ) - hourLength ; if ( hour == - 1 ) return defaultValue ; // MINUTE
int minute = 0 ; if ( ! ds . isAfterLast ( ) ) { if ( ! ( ds . fwIfCurrent ( ':' ) || ds . fwIfCurrent ( '.' ) ) ) return defaultValue ; minute = ds . readDigits ( ) ; if ( minute == - 1 ) return defaultValue ; } else if ( hourLength > 2 ) { int h = hour / 100 ; minute = hour - ( h * 100 ) ; hour = h ; } if ( minute > 59 ) return defaultValue ; if ( hour > 14 || ( hour == 14 && minute > 0 ) ) return defaultValue ; long offset = hour * 60L * 60L * 1000L ; offset += minute * 60 * 1000 ; if ( ! checkAfterLast || ds . isAfterLast ( ) ) { long time = util . toTime ( TimeZoneConstants . UTC , years , months , days , hours , minutes , seconds , milliSeconds , 0 ) ; if ( isPlus ) time -= offset ; else time += offset ; return new DateTimeImpl ( time , false ) ; } return defaultValue ; |
public class ControlBeanContextSupport { /** * Fire a BeanContextMembershipEvent .
* @ param bcme Event to fire .
* @ param childrenAdded True if add event , false if remove event . */
private void fireMembershipEvent ( BeanContextMembershipEvent bcme , boolean childrenAdded ) { } } | for ( BeanContextMembershipListener bcml : _bcMembershipListeners ) { if ( childrenAdded ) { bcml . childrenAdded ( bcme ) ; } else { bcml . childrenRemoved ( bcme ) ; } } |
public class Traversals { /** * Post - order traversal of all branching nodes in a suffix tree ( emulated using a
* suffix array and the LCP array ) . Post - order traversal is also called < i > bottom - up
* traversal < / i > that is child nodes are reported before parent nodes ( and the root is
* the last node to process ) .
* The algorithm implemented here is from < i > Efficient Substring Traversal with Suffix
* Arrays < / i > by Toru Kasai , Hiroki Arimura and Setsuo Arikawa , Dept of Informatics ,
* Kyushu University , Japan .
* @ param sequenceLength Input sequence length for the suffix array and LCP array .
* @ param sa Suffix array .
* @ param lcp Corresponding LCP array for a given suffix array .
* @ param visitor Callback visitor computing aggregate values when traversing the
* tree .
* @ param epsilon " Zero " value ( epsilon ) for computations . */
public static < E > void postorder ( final int sequenceLength , int [ ] sa , int [ ] lcp , E epsilon , IPostOrderComputingVisitor < E > visitor ) { } } | assert sequenceLength <= sa . length && sequenceLength <= lcp . length : "Input sequence length larger than suffix array or the LCP." ; final Deque < Integer > stack = new ArrayDeque < > ( ) ; final ArrayList < E > values = new ArrayList < > ( ) ; // Push the stack bottom marker ( sentinel ) .
stack . push ( - 1 ) ; stack . push ( - 1 ) ; values . add ( epsilon ) ; // Process every leaf .
int top_h ; E top_c ; for ( int i = 0 ; i <= sequenceLength ; i ++ ) { final int h = ( sequenceLength == i ? - 1 : lcp [ i ] ) ; E ci = epsilon ; while ( true ) { top_h = stack . peek ( ) ; if ( top_h <= h ) break ; stack . pop ( ) ; // Visit the node and remove it from the end of the stack .
top_c = values . remove ( values . size ( ) - 1 ) ; final int top_i = stack . pop ( ) ; final boolean leaf = ( top_i < 0 ) ; ci = visitor . aggregate ( top_c , ci ) ; visitor . visitNode ( sa [ leaf ? - ( top_i + 1 ) : top_i ] , top_h , leaf , ci ) ; top_c = values . get ( values . size ( ) - 1 ) ; } if ( top_h < h ) { stack . push ( i ) ; stack . push ( h ) ; values . add ( ci ) ; } else { assert top_h == h ; final int index = values . size ( ) - 1 ; values . set ( index , visitor . aggregate ( ci , values . get ( index ) ) ) ; } if ( i < sequenceLength ) { // Mark leaf nodes in the stack .
stack . push ( - ( i + 1 ) ) ; stack . push ( sequenceLength - sa [ i ] ) ; values . add ( visitor . leafValue ( i , sa [ i ] , sequenceLength - sa [ i ] ) ) ; } } |
public class OAuthCodec { /** * Encode the specified value .
* @ param value The value to encode .
* @ return The encoded value . */
public static String oauthEncode ( String value ) { } } | if ( value == null ) { return "" ; } try { return new String ( URLCodec . encodeUrl ( SAFE_CHARACTERS , value . getBytes ( "UTF-8" ) ) , "US-ASCII" ) ; } catch ( UnsupportedEncodingException e ) { throw new RuntimeException ( e ) ; } |
public class RetouchedBloomFilter { /** * Chooses the bit position that minimizes the number of false negative generated while maximizing .
* the number of false positive removed .
* @ param h The different bit positions .
* @ return The position that minimizes the number of false negative generated while maximizing . */
private int ratioRemove ( int [ ] h ) { } } | computeRatio ( ) ; int minIndex = Integer . MAX_VALUE ; double minValue = Double . MAX_VALUE ; for ( int i = 0 ; i < nbHash ; i ++ ) { if ( ratio [ h [ i ] ] < minValue ) { minValue = ratio [ h [ i ] ] ; minIndex = h [ i ] ; } } return minIndex ; |
public class JavaZipFileSystem { /** * { @ inheritDoc } */
public InputStream openInputStream ( VirtualFile mountPoint , VirtualFile target ) throws IOException { } } | final ZipNode zipNode = getExistingZipNode ( mountPoint , target ) ; final File cachedFile = zipNode . cachedFile ; if ( cachedFile != null ) { return new FileInputStream ( cachedFile ) ; } if ( rootNode == zipNode ) { return new FileInputStream ( archiveFile ) ; } final JarEntry entry = zipNode . entry ; if ( entry == null ) { throw VFSMessages . MESSAGES . notAFile ( target . getPathName ( ) ) ; } return zipFile . getInputStream ( entry ) ; |
public class CarrierRefresher { /** * Helper method to transparently rearrange the node list based on the current global offset .
* @ param nodeList the list to shift . */
< T > void shiftNodeList ( List < T > nodeList ) { } } | int shiftBy = ( int ) ( nodeOffset ++ % nodeList . size ( ) ) ; for ( int i = 0 ; i < shiftBy ; i ++ ) { T element = nodeList . remove ( 0 ) ; nodeList . add ( element ) ; } |
public class JQLChecker {
    /**
     * Extract columns to insert or update.
     *
     * @param jqlContext the jql context
     * @param jqlValue the jql value
     * @param entity the entity
     * @return the set of column names found in the INSERT column list or the
     *         UPDATE SET clause, in source order
     */
    public Set<String> extractColumnsToInsertOrUpdate(final JQLContext jqlContext, String jqlValue, final Finder<SQLProperty> entity) {
        final Set<String> result = new LinkedHashSet<String>();
        // Tri-state flags: null = never entered, true = currently inside,
        // false = already exited (so only the first occurrence is tracked).
        final One<Boolean> selectionOn = new One<Boolean>(null);
        final One<Boolean> insertOn = new One<Boolean>(null);
        // column_name_set is needed for INSERT statements;
        // columns_to_update is needed for UPDATE statements.
        analyzeInternal(jqlContext, jqlValue, new JqlBaseListener() {
            @Override
            public void enterColumn_name_set(Column_name_setContext ctx) {
                if (insertOn.value0 == null) {
                    insertOn.value0 = true;
                }
            }

            @Override
            public void exitColumn_name_set(Column_name_setContext ctx) {
                insertOn.value0 = false;
            }

            @Override
            public void enterColumns_to_update(Columns_to_updateContext ctx) {
                if (selectionOn.value0 == null) {
                    selectionOn.value0 = true;
                }
            }

            @Override
            public void exitColumns_to_update(Columns_to_updateContext ctx) {
                selectionOn.value0 = false;
            }

            @Override
            public void enterColumn_name(Column_nameContext ctx) {
                // Collects column names for INSERTs, but only while inside the
                // first column_name_set.
                if (insertOn.value0 != null && insertOn.value0 == true) {
                    result.add(ctx.getText());
                }
            }

            @Override
            public void enterColumn_name_to_update(Column_name_to_updateContext ctx) {
                // UPDATE SET columns are always collected.
                result.add(ctx.getText());
            }
        });
        return result;
    }
}
public class OneShotSQLGeneratorEngine { /** * We look for the ID in the list of IDs , if it ' s not there , then we return - 2,
* which we know will never appear on the DB . This is correct because if a
* constant appears in a query , and that constant was never inserted in the
* DB , the query must be empty ( that atom ) , by putting - 2 as id , we will
* enforce that .
* @ param uri
* @ return */
private int getUriid ( String uri ) { } } | Integer id = uriRefIds . getId ( uri ) ; if ( id != null ) return id ; return - 2 ; |
public class InmemoryNodeTypeRepository {
    /**
     * {@inheritDoc}
     *
     * Removes the node type, its related-type definitions, and its own
     * definitions from the in-memory registry, then refreshes the
     * "any types registered" flag.
     */
    public void removeNodeType(NodeTypeData nodeType) {
        InternalQName nodeTypeName = nodeType.getName();
        // NOTE(review): despite the variable name "supers", this collects the
        // SUBTYPES of the node type before it is removed from the hierarchy —
        // confirm against the hierarchy holder's semantics.
        final Set<InternalQName> supers = hierarchy.getSubtypes(nodeTypeName);
        // remove from internal lists
        hierarchy.removeNodeType(nodeTypeName);
        // drop the definitions contributed through each related type
        if (supers != null) {
            for (final InternalQName superName : supers) {
                defsHolder.removeDefinitions(nodeTypeName, hierarchy.getNodeType(superName));
            }
        }
        // remove the node type's own definitions
        defsHolder.removeDefinitions(nodeTypeName, nodeType);
        // refresh the flag tracking whether any types remain registered
        haveTypes = hierarchy.getAllNodeTypes().size() > 0;
    }
}
public class PhotosApi { /** * Returns next and previous photos for a photo in a photostream .
* < br >
* This method does not require authentication .
* @ param photoId Required . The id of the photo to fetch the context for .
* @ return object with the context of the photo .
* @ throws JinxException if the photo id is null or empty , or if there are any errors .
* @ see < a href = " https : / / www . flickr . com / services / api / flickr . photos . getContext . html " > flickr . photos . getContext < / a > */
public Context getContext ( String photoId ) throws JinxException { } } | JinxUtils . validateParams ( photoId ) ; Map < String , String > params = new TreeMap < > ( ) ; params . put ( "method" , "flickr.photos.getContext" ) ; params . put ( "photo_id" , photoId ) ; return jinx . flickrGet ( params , Context . class ) ; |
public class SingleCouponMap {
    /**
     * Returns entryIndex if the given key is found. The coupon may be valid or contain a table index.
     * If not found, returns the one's complement of the entryIndex
     * of an empty slot for insertion, which may be over a deleted key.
     *
     * @param key the given key
     * @return the entryIndex (>= 0 when found, ~entryIndex of an empty slot otherwise)
     */
    @Override
    int findKey(final byte[] key) {
        final long[] hash = MurmurHash3.hash(key, SEED);
        // Open addressing with double hashing: initial slot from hash[0],
        // probe stride derived from hash[1].
        int entryIndex = getIndex(hash[0], tableEntries_);
        final int stride = getStride(hash[1], tableEntries_);
        final int loopIndex = entryIndex; // used to detect a full wrap-around
        do {
            if (couponsArr_[entryIndex] == 0) {
                return ~entryIndex; // empty slot: encoded as one's complement
            }
            // Keys are stored inline in keysArr_ at fixed-size offsets.
            if (Map.arraysEqual(key, 0, keysArr_, entryIndex * keySizeBytes_, keySizeBytes_)) {
                return entryIndex;
            }
            entryIndex = (entryIndex + stride) % tableEntries_;
        } while (entryIndex != loopIndex);
        // Probed every slot without a match or an empty slot — the table is
        // full; resizing should normally prevent reaching this point.
        throw new SketchesArgumentException("Key not found and no empty slots!");
    }
}
public class ControllerHandler { /** * Configures the content - type suffixes
* @ param engines
* @ return acceptable content - type suffixes */
protected Set < String > configureContentTypeSuffixes ( ContentTypeEngines engines ) { } } | if ( null == ClassUtil . getAnnotation ( method , ContentTypeBySuffix . class ) ) { return Collections . emptySet ( ) ; } Set < String > suffixes = new TreeSet < > ( ) ; for ( String suffix : engines . getContentTypeSuffixes ( ) ) { String contentType = engines . getContentTypeEngine ( suffix ) . getContentType ( ) ; if ( declaredProduces . contains ( contentType ) ) { suffixes . add ( suffix ) ; } } return suffixes ; |
public class AsmUtils {
    /**
     * Changes the access level for the specified field for a class: makes it
     * accessible and clears its FINAL modifier.
     *
     * @param clazz the clazz
     * @param fieldName the field name (deobfuscated)
     * @param srgName the srg name (used in an obfuscated environment)
     * @param silenced whether to suppress error logging on failure
     * @return the accessible, non-final field, or null when reflection fails
     */
    public static Field changeFieldAccess(Class<?> clazz, String fieldName, String srgName, boolean silenced) {
        try {
            // Pick the obfuscated (SRG) name when running in an obf environment.
            Field f = clazz.getDeclaredField(MalisisCore.isObfEnv ? srgName : fieldName);
            f.setAccessible(true);
            // Clear the FINAL bit via Field.modifiers so the field can be written.
            // NOTE(review): this reflection hack is rejected on Java 12+ —
            // confirm the supported runtime before relying on it.
            Field modifiers = Field.class.getDeclaredField("modifiers");
            modifiers.setAccessible(true);
            modifiers.setInt(f, f.getModifiers() & ~Modifier.FINAL);
            return f;
        } catch (ReflectiveOperationException e) {
            if (!silenced)
                MalisisCore.log.error("Could not change access for field " + clazz.getSimpleName() + "." + (MalisisCore.isObfEnv ? srgName : fieldName), e);
            return null;
        }
    }
}
public class GoogleMapShapeConverter { /** * Convert a { @ link CurvePolygon } to a { @ link PolygonOptions }
* @ param curvePolygon curve polygon
* @ return polygon options
* @ since 1.4.1 */
public PolygonOptions toCurvePolygon ( CurvePolygon curvePolygon ) { } } | PolygonOptions polygonOptions = new PolygonOptions ( ) ; List < Curve > rings = curvePolygon . getRings ( ) ; if ( ! rings . isEmpty ( ) ) { Double z = null ; // Add the polygon points
Curve curve = rings . get ( 0 ) ; if ( curve instanceof CompoundCurve ) { CompoundCurve compoundCurve = ( CompoundCurve ) curve ; for ( LineString lineString : compoundCurve . getLineStrings ( ) ) { // Try to simplify the number of points in the compound curve
List < Point > points = simplifyPoints ( lineString . getPoints ( ) ) ; for ( Point point : points ) { LatLng latLng = toLatLng ( point ) ; polygonOptions . add ( latLng ) ; if ( point . hasZ ( ) ) { z = ( z == null ) ? point . getZ ( ) : Math . max ( z , point . getZ ( ) ) ; } } } } else if ( curve instanceof LineString ) { LineString lineString = ( LineString ) curve ; // Try to simplify the number of points in the curve
List < Point > points = simplifyPoints ( lineString . getPoints ( ) ) ; for ( Point point : points ) { LatLng latLng = toLatLng ( point ) ; polygonOptions . add ( latLng ) ; if ( point . hasZ ( ) ) { z = ( z == null ) ? point . getZ ( ) : Math . max ( z , point . getZ ( ) ) ; } } } else { throw new GeoPackageException ( "Unsupported Curve Type: " + curve . getClass ( ) . getSimpleName ( ) ) ; } // Add the holes
for ( int i = 1 ; i < rings . size ( ) ; i ++ ) { Curve hole = rings . get ( i ) ; List < LatLng > holeLatLngs = new ArrayList < LatLng > ( ) ; if ( hole instanceof CompoundCurve ) { CompoundCurve holeCompoundCurve = ( CompoundCurve ) hole ; for ( LineString holeLineString : holeCompoundCurve . getLineStrings ( ) ) { // Try to simplify the number of points in the hole
List < Point > holePoints = simplifyPoints ( holeLineString . getPoints ( ) ) ; for ( Point point : holePoints ) { LatLng latLng = toLatLng ( point ) ; holeLatLngs . add ( latLng ) ; if ( point . hasZ ( ) ) { z = ( z == null ) ? point . getZ ( ) : Math . max ( z , point . getZ ( ) ) ; } } } } else if ( hole instanceof LineString ) { LineString holeLineString = ( LineString ) hole ; // Try to simplify the number of points in the hole
List < Point > holePoints = simplifyPoints ( holeLineString . getPoints ( ) ) ; for ( Point point : holePoints ) { LatLng latLng = toLatLng ( point ) ; holeLatLngs . add ( latLng ) ; if ( point . hasZ ( ) ) { z = ( z == null ) ? point . getZ ( ) : Math . max ( z , point . getZ ( ) ) ; } } } else { throw new GeoPackageException ( "Unsupported Curve Hole Type: " + hole . getClass ( ) . getSimpleName ( ) ) ; } polygonOptions . addHole ( holeLatLngs ) ; } if ( curvePolygon . hasZ ( ) && z != null ) { polygonOptions . zIndex ( z . floatValue ( ) ) ; } } return polygonOptions ; |
public class ContextSerializerFactory { /** * Returns the serializer for a given class . */
public Serializer getSerializer ( String className ) { } } | Serializer serializer = _serializerClassMap . get ( className ) ; if ( serializer == AbstractSerializer . NULL ) return null ; else return serializer ; |
public class CodeGenerator { /** * If we ' re at the start of an arrow function body , we need parentheses around object literals and
* object patterns . We also must also pass the IN _ FOR _ INIT _ CLAUSE flag into subexpressions . */
private static Context getContextForArrowFunctionBody ( Context context ) { } } | return context . inForInInitClause ( ) ? Context . START_OF_ARROW_FN_IN_FOR_INIT : Context . START_OF_ARROW_FN_BODY ; |
public class MimeTypeUtils { /** * Parse the given , comma - separated string into a list of { @ code MimeType } objects .
* @ param mimeTypes the string to parse
* @ return the list of mime types
* @ throws IllegalArgumentException if the string cannot be parsed */
public static List < MimeType > parseMimeTypes ( String mimeTypes ) { } } | if ( ! Strings . isNullOrEmpty ( mimeTypes ) ) { return Collections . emptyList ( ) ; } String [ ] tokens = mimeTypes . split ( ",\\s*" ) ; List < MimeType > result = new ArrayList < MimeType > ( tokens . length ) ; for ( String token : tokens ) { result . add ( parseMimeType ( token ) ) ; } return result ; |
public class UserTransactionImpl { /** * Stop
* @ exception Throwable Thrown if an error occurs */
public void stop ( ) throws Throwable { } } | Context context = new InitialContext ( ) ; context . unbind ( JNDI_NAME ) ; context . close ( ) ; |
public class ResultSetUtility { /** * Convert all rows of the ResultSet to a Map . The keys of the Map are property names transformed from column names .
* @ param rs the result set
* @ param alreadyDeterminedMappingsthe mapping of column label / name and column meta data
* @ returnA list of Map representations of all the rows in the result set
* @ throws SQLException */
public List < Map < String , Object > > convertAllToMaps ( ResultSet rs , Map < String , ColumnMetaData > alreadyDeterminedMappings ) throws SQLException { } } | ResultSetMetaData rsmd = rs . getMetaData ( ) ; Map < String , ColumnMetaData > columnToPropertyMappings = alreadyDeterminedMappings ; if ( columnToPropertyMappings == null ) { columnToPropertyMappings = createColumnToPropertyMappings ( rsmd ) ; } List < Map < String , Object > > list = new LinkedList < Map < String , Object > > ( ) ; while ( rs . next ( ) ) { Map < String , Object > map = convertToMap ( rs , columnToPropertyMappings ) ; list . add ( map ) ; } return list ; |
public class Misc {
    /**
     * Port of CBLKeyForPrefixMatch(id key, unsigned depth) from CBLMisc.m.
     * Produces a key that sorts just after every key having {@code key} as a
     * prefix, for use as the end of a prefix-match range query.
     */
    public static Object keyForPrefixMatch(Object key, int depth) {
        if (depth < 1)
            return key;
        if (key instanceof String) {
            // U+FFFF sorts after any other character, so appending it bounds
            // all strings that start with this prefix.
            return (key + "\uFFFF");
        } else if (key instanceof List) {
            ArrayList<Map<String, Object>> nuKey = new ArrayList<Map<String, Object>>((List<Map<String, Object>>) key);
            if (depth == 1) {
                // An empty map sorts after any other value in view collation.
                nuKey.add(new HashMap<String, Object>());
            } else {
                // Recurse into the last element of the array key.
                Object lastObject = nuKey.get(nuKey.size() - 1);
                lastObject = keyForPrefixMatch(lastObject, depth - 1);
                // NOTE(review): this cast assumes the recursive result is a
                // Map, but it may be a String for string elements; the
                // unchecked generics make it compile regardless — verify
                // against the CBLMisc.m original.
                nuKey.set(nuKey.size() - 1, (Map<String, Object>) lastObject);
            }
            return nuKey;
        } else {
            return key;
        }
    }
}
public class ValueExpressionImpl {
    /**
     * Sets the value of this expression against the given EL context.
     * Evaluation listeners are notified before and after the assignment.
     *
     * @see javax.el.ValueExpression#setValue(javax.el.ELContext, java.lang.Object)
     */
    @Override
    public void setValue(ELContext context, Object value) throws PropertyNotFoundException, PropertyNotWritableException, ELException {
        // Wrap the EL context with this expression's function/variable mappers.
        EvaluationContext ctx = new EvaluationContext(context, this.fnMapper, this.varMapper);
        context.notifyBeforeEvaluation(getExpressionString());
        this.getNode().setValue(ctx, value);
        context.notifyAfterEvaluation(getExpressionString());
    }
}
public class Ifc4PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Returns the {@code EClass} for IfcValveType, lazily resolved from the
     * globally registered Ifc4 package (classifier index 748).
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcValveType() {
        if (ifcValveTypeEClass == null) {
            // Lazy lookup from the EMF global package registry.
            ifcValveTypeEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(748);
        }
        return ifcValveTypeEClass;
    }
}
public class QueueEntryRow {
    /**
     * Extracts the queue name from the KeyValue row; the row must be a queue
     * entry. Delegates to the buffer-based overload using the KeyValue's
     * backing buffer and row slice.
     */
    public static QueueName getQueueName(String appName, String flowName, KeyValue keyValue) {
        return getQueueName(appName, flowName, keyValue.getBuffer(), keyValue.getRowOffset(), keyValue.getRowLength());
    }
}
public class CmsFileTable { /** * Updates all items with ids from the given list . < p >
* @ param ids the resource structure ids to update
* @ param remove true if the item should be removed only */
public void update ( Collection < CmsUUID > ids , boolean remove ) { } } | for ( CmsUUID id : ids ) { updateItem ( id , remove ) ; } rebuildMenu ( ) ; |
public class ParseContext { /** * Runs { @ code parser } with error recording suppressed . */
final boolean withErrorSuppressed ( Parser < ? > parser ) { } } | boolean oldValue = errorSuppressed ; errorSuppressed = true ; boolean ok = parser . apply ( this ) ; errorSuppressed = oldValue ; return ok ; |
public class MalisisInventoryContainer {
    /**
     * Called when this {@link MalisisInventoryContainer} is closed. Chains to
     * the vanilla handler, then detaches this container from every inventory
     * it had opened.
     *
     * @param owner the owner
     */
    @Override
    public void onContainerClosed(EntityPlayer owner) {
        super.onContainerClosed(owner);
        // Unregister from all participating inventories.
        getInventories().forEach(i -> i.removeOpenedContainer(this));
    }
}
public class UNode { /** * Get the child node ( member ) of this UNode with the given name . If this UNode isn ' t
* a MAP or there is no child node with the given name , null is returned . Note : the
* UNode returned is not copied .
* @ param name Candidate name of a child member node .
* @ return Child UNode with the given name , if any , otherwise null . */
public UNode getMember ( String name ) { } } | if ( m_childNodeMap == null ) { return null ; } return m_childNodeMap . get ( name ) ; |
public class Helper { /** * obtains the internal JDO lifecycle state of the input StatemanagerInternal .
* This Method is helpful to display persistent objects internal state .
* @ param sm the StateManager to be inspected
* @ return the LifeCycleState of a StateManager instance */
static Object getLCState ( StateManagerInternal sm ) { } } | // unfortunately the LifeCycleState classes are package private .
// so we have to do some dirty reflection hack to access them
try { Field myLC = sm . getClass ( ) . getDeclaredField ( "myLC" ) ; myLC . setAccessible ( true ) ; return myLC . get ( sm ) ; } catch ( NoSuchFieldException e ) { return e ; } catch ( IllegalAccessException e ) { return e ; } |
public class ClassNode { /** * Finds a constructor matching the given parameters in this class .
* @ return the constructor matching the given parameters or null */
public ConstructorNode getDeclaredConstructor ( Parameter [ ] parameters ) { } } | for ( ConstructorNode method : getDeclaredConstructors ( ) ) { if ( parametersEqual ( method . getParameters ( ) , parameters ) ) { return method ; } } return null ; |
public class CmsListPrintDialog {
    /**
     * Generates the printable output for the given list.<p>
     *
     * @return html code
     */
    public String generateHtml() {
        StringBuffer result = new StringBuffer(2048);
        // Standard dialog frame: html/body/dialog opening markup...
        result.append(htmlStart(null));
        result.append(CmsListExplorerColumn.getExplorerStyleDef());
        result.append(bodyStart("dialog", null));
        result.append(dialogStart());
        result.append(dialogContentStart(getParamTitle()));
        // ...the printable list itself...
        result.append(m_list.printableHtml());
        // ...and the matching closing tags, in reverse order.
        result.append(dialogContentEnd());
        result.append(dialogEnd());
        result.append(bodyEnd());
        result.append(htmlEnd());
        return result.toString();
    }
}
public class OAuth20TokenAuthorizationResponseBuilder {
    /**
     * Build the callback url for the token (implicit) response type: encodes
     * the access token, expiry, optional refresh token, state and nonce into
     * the redirect URI's fragment, then builds the response model and view.
     *
     * @param holder the holder
     * @param redirectUri the redirect uri
     * @param accessToken the access token
     * @param params the params
     * @param refreshToken the refresh token
     * @param context the context
     * @return the model and view redirecting to the callback url
     * @throws Exception the exception
     */
    protected ModelAndView buildCallbackUrlResponseType(final AccessTokenRequestDataHolder holder, final String redirectUri, final AccessToken accessToken, final List<NameValuePair> params, final RefreshToken refreshToken, final J2EContext context) throws Exception {
        val attributes = holder.getAuthentication().getAttributes();
        // NOTE(review): assumes the STATE and NONCE attributes are always
        // present on the authentication; a missing attribute would throw an
        // NPE here — confirm upstream guarantees.
        val state = attributes.get(OAuth20Constants.STATE).get(0).toString();
        val nonce = attributes.get(OAuth20Constants.NONCE).get(0).toString();
        val builder = new URIBuilder(redirectUri);
        val stringBuilder = new StringBuilder();
        val timeToLive = accessTokenExpirationPolicy.getTimeToLive();
        // Mandatory fragment parameters: access_token, token_type, expires_in.
        stringBuilder.append(OAuth20Constants.ACCESS_TOKEN).append('=').append(accessToken.getId()).append('&').append(OAuth20Constants.TOKEN_TYPE).append('=').append(OAuth20Constants.TOKEN_TYPE_BEARER).append('&').append(OAuth20Constants.EXPIRES_IN).append('=').append(timeToLive);
        if (refreshToken != null) {
            stringBuilder.append('&').append(OAuth20Constants.REFRESH_TOKEN).append('=').append(refreshToken.getId());
        }
        // Pass through extra caller-supplied parameters (not URL-encoded here).
        params.forEach(p -> stringBuilder.append('&').append(p.getName()).append('=').append(p.getValue()));
        if (StringUtils.isNotBlank(state)) {
            stringBuilder.append('&').append(OAuth20Constants.STATE).append('=').append(EncodingUtils.urlEncode(state));
        }
        if (StringUtils.isNotBlank(nonce)) {
            stringBuilder.append('&').append(OAuth20Constants.NONCE).append('=').append(EncodingUtils.urlEncode(nonce));
        }
        // Token data travels in the URI fragment, per the implicit grant.
        builder.setFragment(stringBuilder.toString());
        val url = builder.toString();
        LOGGER.debug("Redirecting to URL [{}]", url);
        // Also expose the values as model parameters for the response view.
        val parameters = new LinkedHashMap<String, String>();
        parameters.put(OAuth20Constants.ACCESS_TOKEN, accessToken.getId());
        if (refreshToken != null) {
            parameters.put(OAuth20Constants.REFRESH_TOKEN, refreshToken.getId());
        }
        parameters.put(OAuth20Constants.EXPIRES_IN, timeToLive.toString());
        parameters.put(OAuth20Constants.STATE, state);
        parameters.put(OAuth20Constants.NONCE, nonce);
        parameters.put(OAuth20Constants.CLIENT_ID, accessToken.getClientId());
        return buildResponseModelAndView(context, servicesManager, accessToken.getClientId(), url, parameters);
    }
}
public class ClassIndex {
    /**
     * Retrieves names of classes from the given package.
     * The package must be annotated with {@link IndexSubclasses} for the classes inside
     * to be indexed at compile-time by {@link org.atteo.classindex.processor.ClassIndexProcessor}.
     * Uses the current thread's context class loader.
     *
     * @param packageName name of the package to search classes for
     * @return names of classes from package
     */
    public static Iterable<String> getPackageClassesNames(String packageName) {
        return getPackageClassesNames(packageName, Thread.currentThread().getContextClassLoader());
    }
}
public class BinaryUtil { /** * Pretty print byte [ ] with Ascii char and subscript of the array
* @ param array */
public static void print ( byte [ ] array ) { } } | String [ ] name = new String [ ] { "+0" , "+1" , "+2" , "+3" , "+4" , "+5" , "+6" , "+7" , "+8" , "+9" , "+a" , "+b" , "+c" , "+d" , "+e" , "+f" } ; for ( int i = 0 ; i < 16 ; i ++ ) { System . out . print ( name [ i ] + " " ) ; } System . out . println ( ) ; for ( int i = 0 ; i < array . length ; i ++ ) { if ( i != 0 && i % 16 == 0 ) { System . out . println ( ) ; } byte b = array [ i ] ; System . out . print ( String . format ( "%02x" , b ) . toUpperCase ( ) + " " ) ; } System . out . println ( ) ; |
public class GeneratePluginForServer {
    /**
     * Parse the configured server address of the form
     * {@code <user>:<password>@<host>:<port>}.
     *
     * @return the parsed login address
     * @throws IllegalArgumentException when the address is malformed
     */
    protected ParseLoginAddress parseServerAddressValue() throws IllegalArgumentException {
        ParseLoginAddress serverAddress = new ParseLoginAddress(this.inputAddress, this.commandConsole);
        // Parse <user>:<password>@<host>:<port>; "--server" identifies the
        // originating option in any error messages.
        serverAddress.parseLoginAddressValue("--server");
        return serverAddress;
    }
}
public class HAProxyMessageDecoder { /** * Returns the index in the buffer of the end of line found .
* Returns - 1 if no end of line was found in the buffer . */
private static int findEndOfLine ( final ByteBuf buffer ) { } } | final int n = buffer . writerIndex ( ) ; for ( int i = buffer . readerIndex ( ) ; i < n ; i ++ ) { final byte b = buffer . getByte ( i ) ; if ( b == '\r' && i < n - 1 && buffer . getByte ( i + 1 ) == '\n' ) { return i ; // \ r \ n
} } return - 1 ; // Not found . |
public class Models {
    /**
     * Summarize fields which are specific to hex.drf.DRF.DRFModel.
     */
    private static void summarizeDRFModel(ModelSummary summary, hex.drf.DRF.DRFModel model) {
        // add generic fields such as column names
        summarizeModelCommonFields(summary, model);
        summary.model_algorithm = "BigData RF";
        // Partition the model's JSON parameters into importance tiers using
        // the DRF-specific whitelists.
        JsonObject all_params = (model.get_params()).toJSON();
        summary.critical_parameters = whitelistJsonObject(all_params, DRF_critical_params);
        summary.secondary_parameters = whitelistJsonObject(all_params, DRF_secondary_params);
        summary.expert_parameters = whitelistJsonObject(all_params, DRF_expert_params);
    }
}
public class QuickSelect { /** * The usual swap method .
* @ param data Array
* @ param a First index
* @ param b Second index */
private static final void swap ( double [ ] data , int a , int b ) { } } | double tmp = data [ a ] ; data [ a ] = data [ b ] ; data [ b ] = tmp ; |
public class DisasterRecoveryConfigurationsInner {
    /**
     * Fails over from the current primary server to this server.
     * Blocking wrapper around the async service call.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param disasterRecoveryConfigurationName The name of the disaster recovery configuration to failover.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void beginFailover(String resourceGroupName, String serverName, String disasterRecoveryConfigurationName) {
        // Block on the async call; the body is discarded (operation is void).
        beginFailoverWithServiceResponseAsync(resourceGroupName, serverName, disasterRecoveryConfigurationName).toBlocking().single().body();
    }
}
public class KeyedStream { /** * Windows this { @ code KeyedStream } into sliding time windows .
* < p > This is a shortcut for either { @ code . window ( SlidingEventTimeWindows . of ( size , slide ) ) } or
* { @ code . window ( SlidingProcessingTimeWindows . of ( size , slide ) ) } depending on the time
* characteristic set using
* { @ link org . apache . flink . streaming . api . environment . StreamExecutionEnvironment # setStreamTimeCharacteristic ( org . apache . flink . streaming . api . TimeCharacteristic ) }
* @ param size The size of the window . */
public WindowedStream < T , KEY , TimeWindow > timeWindow ( Time size , Time slide ) { } } | if ( environment . getStreamTimeCharacteristic ( ) == TimeCharacteristic . ProcessingTime ) { return window ( SlidingProcessingTimeWindows . of ( size , slide ) ) ; } else { return window ( SlidingEventTimeWindows . of ( size , slide ) ) ; } |
public class ReloadableSparkeyReader { /** * Load a new log file into this reader .
* @ param logFile the log file to load .
* @ return A future that resolves to the sparkey reader once it has loaded the new log file . */
public CompletionStage < ReloadableSparkeyReader > load ( final File logFile ) { } } | checkArgument ( isValidLogFile ( logFile ) ) ; CompletableFuture < ReloadableSparkeyReader > result = new CompletableFuture < > ( ) ; this . executorService . submit ( ( ) -> { switchReader ( logFile ) ; result . complete ( this ) ; } ) ; return result ; |
public class AmazonRDSClient {
    /**
     * Creates a DBSnapshot. The source DBInstance must be in "available" state.
     *
     * @param request the CreateDBSnapshot request
     * @return Result of the CreateDBSnapshot operation returned by the service.
     * @throws DBSnapshotAlreadyExistsException <i>DBSnapshotIdentifier</i> is already used by an existing snapshot.
     * @throws InvalidDBInstanceStateException The DB instance isn't in a valid state.
     * @throws DBInstanceNotFoundException <i>DBInstanceIdentifier</i> doesn't refer to an existing DB instance.
     * @throws SnapshotQuotaExceededException The request would result in the user exceeding the allowed number of DB snapshots.
     * @sample AmazonRDS.CreateDBSnapshot
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/rds-2014-10-31/CreateDBSnapshot" target="_top">AWS API Documentation</a>
     */
    @Override
    public DBSnapshot createDBSnapshot(CreateDBSnapshotRequest request) {
        // Apply client-side request handlers/mutations before dispatching.
        request = beforeClientExecution(request);
        return executeCreateDBSnapshot(request);
    }
}
public class HttpServlets { /** * 获取日期参数 。
* @ param request
* 请求
* @ param name
* 参数名
* @ param format
* 日期格式
* @ return 参数内容 */
public static Calendar getCalendarParameter ( final HttpServletRequest request , final String name , final String format ) { } } | String value = request . getParameter ( name ) ; if ( StringUtils . isBlank ( value ) ) { return null ; } return DateFormatHelper . parse ( value , format ) ; |
public class H2HandlerImpl { /** * Determines if a given request is an http2 upgrade request */
@ Override public boolean isH2Request ( HttpInboundConnection hic , ServletRequest request ) throws ServletException { } } | // first check if H2 is enabled for this channel / port
if ( ! ( ( Http2InboundConnection ) hic ) . isHTTP2UpgradeRequest ( null , true ) ) { return false ; } Map < String , String > headers = new HashMap < String , String > ( ) ; HttpServletRequest hsrt = ( HttpServletRequest ) request ; Enumeration < String > headerNames = hsrt . getHeaderNames ( ) ; // create a map of the headers to pass to the transport code
while ( headerNames . hasMoreElements ( ) ) { String key = ( String ) headerNames . nextElement ( ) ; String value = hsrt . getHeader ( key ) ; headers . put ( key , value ) ; } // check if this request is asking to do H2
return ( ( Http2InboundConnection ) hic ) . isHTTP2UpgradeRequest ( headers , false ) ; |
public class SubscriptionFlexHandler {
    /**
     * Creates the (empty) result collection used to accumulate matches during
     * a flex-handler traversal.
     *
     * @return a new empty {@code HashSet}
     */
    public Object initResult() {
        // Entry/exit trace follows the standard SIB tracing pattern; the
        // guard is re-evaluated each time by convention.
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "initResult");
        Set theResults = new HashSet();
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "initResult");
        return theResults;
    }
}
public class MessagePacker {
    /**
     * Writes a byte array to the output.
     * This method is used with {@link #packRawStringHeader(int)} or {@link #packBinaryHeader(int)} methods.
     * Unlike {@link #writePayload(byte[], int, int)}, this method does not make a defensive copy of the
     * given byte array, even if it is shorter than {@link MessagePack.PackerConfig#withBufferFlushThreshold(int)}.
     * This is faster than {@link #writePayload(byte[])} but the caller must not modify the byte array after
     * calling this method.
     *
     * @param src the data to add
     * @param off the start offset in the data
     * @param len the number of bytes to add
     * @return this
     * @throws IOException when underlying output throws IOException
     * @see #writePayload(byte[], int, int)
     */
    public MessagePacker addPayload(byte[] src, int off, int len) throws IOException {
        // Bypass the internal buffer when it cannot hold the payload or the
        // payload exceeds the flush threshold.
        if (buffer == null || buffer.size() - position < len || len > bufferFlushThreshold) {
            flush(); // flush pending buffered bytes first to preserve ordering
            // Hand the caller's array directly to the output (zero-copy).
            out.add(src, off, len);
            totalFlushBytes += len;
        } else {
            // Small payload: copy into the internal buffer.
            buffer.putBytes(position, src, off, len);
            position += len;
        }
        return this;
    }
}
public class ListUtil {
    /**
     * Splits a list into consecutive sublists of the given size (the last one
     * may be shorter). Delegates to Guava's {@code Lists.partition}; note the
     * returned sublists are views of the original list.
     */
    public static <T> List<List<T>> partition(List<T> list, int size) {
        return Lists.partition(list, size);
    }
}
public class Ldif {
    /**
     * Return the next record in the input stream, or null at end of input.
     *
     * @throws NamingException when no ldif input stream was configured
     */
    public DirRecord nextRecord() throws NamingException {
        if (inp == null) {
            // Lazily wrap the raw stream on first use.
            if (in == null) {
                throw new NamingException("No ldif input stream");
            }
            inp = new LdifRecord.Input();
            inp.init(new InputStreamReader(in));
        } else if (inp.eof) {
            // A previous read already exhausted the stream.
            return null;
        }
        LdifRecord ldr = new LdifRecord();
        // read() returns false at end of input.
        if (!ldr.read(inp)) {
            return null;
        }
        return ldr;
    }
}
public class PaymentIntent {
    /**
     * Capture the funds of an existing uncaptured PaymentIntent when its status is <code>
     * requires_capture</code>.
     *
     * <p>Uncaptured PaymentIntents will be canceled exactly seven days after they are created.
     *
     * <p>Read the <a
     * href="/docs/payments/payment-intents/creating-payment-intents#separate-auth-capture">expanded
     * documentation</a> to learn more about separate authorization and capture.
     */
    public PaymentIntent capture() throws StripeException {
        // Delegate with no extra params and default request options.
        return capture((Map<String, Object>) null, (RequestOptions) null);
    }
}
public class MultiHashTable {
    /**
     * Version where the key is used: folds every (key, values) pair into a
     * plain {@link HashTable}, converting each value list with the supplied
     * conversion function.
     */
    @Nonnull
    public <B> HashTable<K, B> toHashTable(@Nonnull F2<K, ImmutableList<V>, B> conversion) {
        // Fold over the underlying table, rebuilding with the same hasher so
        // key placement stays consistent.
        return this.data.foldLeft((acc, p) -> acc.put(p.left, conversion.apply(p.left, p.right)), HashTable.empty(this.data.hasher));
    }
}
public class SquareRegularClustersIntoGrids { /** * There are only two edges on target . Pick the edge which does not go to the provided child */
static SquareNode pickNot ( SquareNode target , SquareNode child ) { } } | for ( int i = 0 ; i < 4 ; i ++ ) { SquareEdge e = target . edges [ i ] ; if ( e == null ) continue ; SquareNode c = e . destination ( target ) ; if ( c != child ) return c ; } throw new RuntimeException ( "There was no odd one out some how" ) ; |
public class RestApiClient {
    /**
     * Delete roster entry.
     *
     * @param username the username
     * @param jid the jid
     * @return the response
     */
    public Response deleteRosterEntry(String username, String jid) {
        // DELETE /users/{username}/roster/{jid} with no extra query params.
        return restClient.delete("users/" + username + "/roster/" + jid, new HashMap<String, String>());
    }
}
public class UIMetricUtils { /** * get MetricSnapshot formatted value string */
public static String getMetricValue ( MetricSnapshot snapshot ) { } } | if ( snapshot == null ) return null ; MetricType type = MetricType . parse ( snapshot . get_metricType ( ) ) ; switch ( type ) { case COUNTER : return format ( snapshot . get_longValue ( ) ) ; case GAUGE : return format ( snapshot . get_doubleValue ( ) ) ; case METER : return format ( snapshot . get_m1 ( ) ) ; case HISTOGRAM : return format ( snapshot . get_mean ( ) ) ; default : return "0" ; } |
public class JobCredentialsInner { /** * Gets a jobs credential .
* @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
* @ param serverName The name of the server .
* @ param jobAgentName The name of the job agent .
* @ param credentialName The name of the credential .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the JobCredentialInner object if successful . */
public JobCredentialInner get ( String resourceGroupName , String serverName , String jobAgentName , String credentialName ) { } } | return getWithServiceResponseAsync ( resourceGroupName , serverName , jobAgentName , credentialName ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class SeaGlassInternalFrameTitlePane { /** * Uninstall the defaults . */
public void uninstallDefaults ( ) { } } | SeaGlassContext context = getContext ( this , ENABLED ) ; style . uninstallDefaults ( context ) ; context . dispose ( ) ; style = null ; JInternalFrame . JDesktopIcon di = frame . getDesktopIcon ( ) ; if ( di != null && di . getComponentPopupMenu ( ) == systemPopupMenu ) { // Release link to systemMenu from the JInternalFrame
di . setComponentPopupMenu ( null ) ; } |
public class SQLiteUpdateTaskHelper { /** * Execute SQL .
* @ param database
* the database
* @ param fileInputStream
* the file input stream */
public static void executeSQL ( final SQLiteDatabase database , InputStream fileInputStream ) { } } | List < String > commands = readSQLFromFile ( fileInputStream ) ; executeSQL ( database , commands ) ; |
public class Base64 { /** * base64解码
* @ param base64 被解码的base64字符串
* @ param out 写出到的流
* @ param isCloseOut 是否关闭输出流
* @ since 4.0.9 */
public static void decodeToStream ( String base64 , OutputStream out , boolean isCloseOut ) { } } | IoUtil . write ( out , isCloseOut , Base64Decoder . decode ( base64 ) ) ; |
public class ContainerOverrides { /** * The command to send to the container that overrides the default command from the Docker image or the job
* definition .
* @ param command
* The command to send to the container that overrides the default command from the Docker image or the job
* definition . */
public void setCommand ( java . util . Collection < String > command ) { } } | if ( command == null ) { this . command = null ; return ; } this . command = new java . util . ArrayList < String > ( command ) ; |
public class ConnectionImpl { /** * This method accepts the same arguments as the Core SPI equivalent
* and returns the same class objects . The difference is that this
* method will only work against MQLink destinations . If a non - mqlink
* destination is supplied an SIDestinationWrongTypeException is thrown .
* @ see com . ibm . wsspi . sib . core . SICoreException # createConsumerSession ( SIBUuid12 , boolean )
* @ param mqLinkUuid
* @ param selector
* @ param unrecoverableReliability
* @ return
* @ throws SIDestinationWrongTypeException
* @ throws SIDestinationNotFoundException
* @ throws SIDestinationLockedException
* @ throws SISelectorSyntaxException
* @ throws SIObjectClosedException
* @ throws SICoreException */
@ Override public ConsumerSession createMQLinkConsumerSession ( String mqLinkUuidStr , SelectionCriteria criteria , Reliability unrecoverableReliability ) throws SIConnectionUnavailableException , SIConnectionDroppedException , SIResourceException , SIConnectionLostException , SILimitExceededException , SIErrorException , SINotAuthorizedException , SIIncorrectCallException , SIDestinationLockedException , SINotPossibleInCurrentConfigurationException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && CoreSPIConnection . tc . isEntryEnabled ( ) ) SibTr . entry ( CoreSPIConnection . tc , "createMQLinkConsumerSession" , new Object [ ] { mqLinkUuidStr , criteria , unrecoverableReliability } ) ; MQLinkHandler mqLinkHandler = null ; boolean forwardScanning = false ; // See if this connection has been closed
checkNotClosed ( ) ; if ( mqLinkUuidStr == null ) { SIIncorrectCallException e = new SIIncorrectCallException ( nls . getFormattedMessage ( "MISSING_PARAM_ERROR_CWSIP0029" , new Object [ ] { "1:5347:1.347.1.25" } , null ) ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && CoreSPIConnection . tc . isEntryEnabled ( ) ) { SibTr . exception ( tc , e ) ; SibTr . exit ( CoreSPIConnection . tc , "createMQLinkConsumerSession" , e ) ; } throw e ; } SIBUuid8 mqLinkUuid = new SIBUuid8 ( mqLinkUuidStr ) ; // Get the destination .
mqLinkHandler = _destinationManager . getMQLinkLocalization ( mqLinkUuid , false ) ; // Check that it is an MQLink Handler
if ( mqLinkHandler == null ) { if ( TraceComponent . isAnyTracingEnabled ( ) && CoreSPIConnection . tc . isEntryEnabled ( ) ) SibTr . exit ( CoreSPIConnection . tc , "createMQLinkConsumerSession" , "SINotPossibleInCurrentConfigurationException" ) ; throw new SINotPossibleInCurrentConfigurationException ( nls . getFormattedMessage ( "MQLINK_ERROR_CWSIP0026" , new Object [ ] { mqLinkUuid , _messageProcessor . getMessagingEngineName ( ) } , null ) ) ; } ConsumerSessionImpl consumer = null ; // Synchronize on the close object , we don ' t want the connection closing
// while we try to add the consumer .
synchronized ( this ) { // See if this connection has been closed
checkNotClosed ( ) ; // create a state object for this consumer session
// In this basic form it is just a wrapper for the discriminator and selector
ConsumerDispatcherState state = new ConsumerDispatcherState ( null , mqLinkHandler . getUuid ( ) , criteria , false , "" , // null durableHome ok here
mqLinkHandler . getName ( ) , mqLinkHandler . getBusName ( ) ) ; // Create a destination address
SIDestinationAddress destAddress = DestinationSessionUtils . createJsDestinationAddress ( mqLinkHandler ) ; try { // create a new ConsumerSession
consumer = new ConsumerSessionImpl ( mqLinkHandler , destAddress , state , this , false , forwardScanning , unrecoverableReliability , false , true , false ) ; } catch ( SIDurableSubscriptionMismatchException e ) { // This exception should not be thrown so FFDC
FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.ConnectionImpl.createMQLinkConsumerSession" , "1:5424:1.347.1.25" , this ) ; SibTr . exception ( tc , e ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && CoreSPIConnection . tc . isEntryEnabled ( ) ) SibTr . exit ( CoreSPIConnection . tc , "createMQLinkConsumerSession" , e ) ; SibTr . error ( tc , "INTERNAL_MESSAGING_ERROR_CWSIP0002" , new Object [ ] { "com.ibm.ws.sib.processor.impl.ConnectionImpl" , "1:5435:1.347.1.25" , e } ) ; // This should never be thrown
throw new SIErrorException ( nls . getFormattedMessage ( "INTERNAL_MESSAGING_ERROR_CWSIP0002" , new Object [ ] { "com.ibm.ws.sib.processor.impl.ConnectionImpl" , "1:5444:1.347.1.25" , e } , null ) , e ) ; } catch ( SINonDurableSubscriptionMismatchException e ) { // This exception should not be thrown so FFDC
FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.ConnectionImpl.createMQLinkConsumerSession" , "1:5318:1.347.1.25" , this ) ; SibTr . exception ( tc , e ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && CoreSPIConnection . tc . isEntryEnabled ( ) ) SibTr . exit ( CoreSPIConnection . tc , "createMQLinkConsumerSession" , e ) ; SibTr . error ( tc , "INTERNAL_MESSAGING_ERROR_CWSIP0002" , new Object [ ] { "com.ibm.ws.sib.processor.impl.ConnectionImpl" , "1:5329:1.347.1.25" , e } ) ; // This should never be thrown
throw new SIErrorException ( nls . getFormattedMessage ( "INTERNAL_MESSAGING_ERROR_CWSIP0002" , new Object [ ] { "com.ibm.ws.sib.processor.impl.ConnectionImpl" , "1:5338:1.347.1.25" , e } , null ) , e ) ; } catch ( SIDurableSubscriptionNotFoundException e ) { // This exception should not be thrown so FFDC
FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.ConnectionImpl.createMQLinkConsumerSession" , "1:5455:1.347.1.25" , this ) ; SibTr . exception ( tc , e ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && CoreSPIConnection . tc . isEntryEnabled ( ) ) SibTr . exit ( CoreSPIConnection . tc , "createMQLinkConsumerSession" , e ) ; SibTr . error ( tc , "INTERNAL_MESSAGING_ERROR_CWSIP0002" , new Object [ ] { "com.ibm.ws.sib.processor.impl.ConnectionImpl" , "1:5466:1.347.1.25" , e } ) ; // This should never be thrown
throw new SIErrorException ( nls . getFormattedMessage ( "INTERNAL_MESSAGING_ERROR_CWSIP0002" , new Object [ ] { "com.ibm.ws.sib.processor.impl.ConnectionImpl" , "1:5475:1.347.1.25" , e } , null ) , e ) ; } catch ( SISessionUnavailableException e ) { // This exception should not be thrown so FFDC
FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.ConnectionImpl.createMQLinkConsumerSession" , "1:5486:1.347.1.25" , this ) ; SibTr . exception ( tc , e ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && CoreSPIConnection . tc . isEntryEnabled ( ) ) SibTr . exit ( CoreSPIConnection . tc , "createMQLinkConsumerSession" , e ) ; SibTr . error ( tc , "INTERNAL_MESSAGING_ERROR_CWSIP0002" , new Object [ ] { "com.ibm.ws.sib.processor.impl.ConnectionImpl" , "1:5497:1.347.1.25" , e } ) ; // This should never be thrown
throw new SIErrorException ( nls . getFormattedMessage ( "INTERNAL_MESSAGING_ERROR_CWSIP0002" , new Object [ ] { "com.ibm.ws.sib.processor.impl.ConnectionImpl" , "1:5506:1.347.1.25" , e } , null ) , e ) ; } catch ( SITemporaryDestinationNotFoundException e ) { // This exception should not be thrown so FFDC
FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.ConnectionImpl.createMQLinkConsumerSession" , "1:5517:1.347.1.25" , this ) ; SibTr . exception ( tc , e ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && CoreSPIConnection . tc . isEntryEnabled ( ) ) SibTr . exit ( CoreSPIConnection . tc , "createMQLinkConsumerSession" , e ) ; SibTr . error ( tc , "INTERNAL_MESSAGING_ERROR_CWSIP0002" , new Object [ ] { "com.ibm.ws.sib.processor.impl.ConnectionImpl" , "1:5528:1.347.1.25" , e } ) ; // This should never be thrown
throw new SIErrorException ( nls . getFormattedMessage ( "INTERNAL_MESSAGING_ERROR_CWSIP0002" , new Object [ ] { "com.ibm.ws.sib.processor.impl.ConnectionImpl" , "1:5537:1.347.1.25" , e } , null ) , e ) ; } synchronized ( _consumers ) { // store a reference
_consumers . add ( consumer ) ; } _messageProcessor . addConsumer ( consumer ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && CoreSPIConnection . tc . isEntryEnabled ( ) ) SibTr . exit ( CoreSPIConnection . tc , "createMQLinkConsumerSession" , consumer ) ; return consumer ; |
public class SoftInputLayout { /** * 修改第一次打开表情之前 , 键盘没有弹出之前的BUG . */
private void fixContentLayoutHeight ( final int oldH , final int newH ) { } } | postDelayed ( new Runnable ( ) { @ Override public void run ( ) { LayoutParams params = ( LayoutParams ) mContentLayout . getLayoutParams ( ) ; params . height = newH ; params . weight = 0 ; requestLayout ( ) ; unlockContentLayoutHeight ( ) ; } } , 100 ) ; |
public class LegacyBehavior { /** * Remove all entries for legacy non - scopes given that the assigned scope execution is also responsible for another scope */
public static void removeLegacyNonScopesFromMapping ( Map < ScopeImpl , PvmExecutionImpl > mapping ) { } } | Map < PvmExecutionImpl , List < ScopeImpl > > scopesForExecutions = new HashMap < PvmExecutionImpl , List < ScopeImpl > > ( ) ; for ( Map . Entry < ScopeImpl , PvmExecutionImpl > mappingEntry : mapping . entrySet ( ) ) { List < ScopeImpl > scopesForExecution = scopesForExecutions . get ( mappingEntry . getValue ( ) ) ; if ( scopesForExecution == null ) { scopesForExecution = new ArrayList < ScopeImpl > ( ) ; scopesForExecutions . put ( mappingEntry . getValue ( ) , scopesForExecution ) ; } scopesForExecution . add ( mappingEntry . getKey ( ) ) ; } for ( Map . Entry < PvmExecutionImpl , List < ScopeImpl > > scopesForExecution : scopesForExecutions . entrySet ( ) ) { List < ScopeImpl > scopes = scopesForExecution . getValue ( ) ; if ( scopes . size ( ) > 1 ) { ScopeImpl topMostScope = getTopMostScope ( scopes ) ; for ( ScopeImpl scope : scopes ) { if ( scope != scope . getProcessDefinition ( ) && scope != topMostScope ) { mapping . remove ( scope ) ; } } } } |
public class MiscUtils { /** * Get the " android : versionCode " value from the Manifest file .
* @ param context The current Context or Activity that this method is called from
* @ return the application version code , or - 999 if versionName cannot be found for the given context . */
public static int getVersionCode ( Context context ) { } } | int versionCode ; try { versionCode = context . getPackageManager ( ) . getPackageInfo ( context . getPackageName ( ) , 0 ) . versionCode ; } catch ( NameNotFoundException e ) { versionCode = - 999 ; } return versionCode ; |
public class TemplCommandOut { /** * Invoke the command execution method given at object creation time .
* This method is automtically called by the TANGO core classes when the
* associated command is requested by a client . It invokes the user supplied
* command execution method and packs the returned data into the outgoing
* CORBA Any object
* @ param dev The device on which the command must be executed
* @ param in _ any The incoming data still packed in a CORBA Any object . For
* command created with this TemplCommandOut class , this Any object does not
* contain usefull data
* @ return The CORBA Any object returned to the client .
* @ exception DevFailed If the execution method failed
* Click < a href = " . . / . . / tango _ basic / idl _ html / Tango . html # DevFailed " > here < / a > to read
* < b > DevFailed < / b > exception specification */
public Any execute ( DeviceImpl dev , Any in_any ) throws DevFailed { } } | // Execute the command associated method
Any returned_data = null ; try { // Execute the method
java . lang . Object [ ] meth_param = new java . lang . Object [ 0 ] ; java . lang . Object obj = exe_method . invoke ( dev , meth_param ) ; // Insert data into the any according to command parameter type
switch ( out_type ) { case Tango_DEV_BOOLEAN : returned_data = insert ( ( Boolean ) obj ) ; break ; case Tango_DEV_SHORT : returned_data = insert ( ( ( Short ) obj ) . shortValue ( ) ) ; break ; case Tango_DEV_LONG : returned_data = insert ( ( ( Integer ) obj ) . intValue ( ) ) ; break ; case Tango_DEV_LONG64 : returned_data = insert ( ( ( Integer ) obj ) . longValue ( ) ) ; break ; case Tango_DEV_FLOAT : returned_data = insert ( ( ( Float ) obj ) . floatValue ( ) ) ; break ; case Tango_DEV_DOUBLE : returned_data = insert ( ( Double ) obj ) ; break ; case Tango_DEV_STRING : returned_data = insert ( ( String ) obj ) ; break ; case Tango_DEVVAR_CHARARRAY : returned_data = insert ( ( byte [ ] ) obj ) ; break ; case Tango_DEVVAR_SHORTARRAY : returned_data = insert ( ( short [ ] ) obj ) ; break ; case Tango_DEVVAR_LONGARRAY : returned_data = insert ( ( int [ ] ) obj ) ; break ; case Tango_DEVVAR_LONG64ARRAY : returned_data = insert ( ( long [ ] ) obj ) ; break ; case Tango_DEVVAR_FLOATARRAY : returned_data = insert ( ( float [ ] ) obj ) ; break ; case Tango_DEVVAR_DOUBLEARRAY : returned_data = insert ( ( double [ ] ) obj ) ; break ; case Tango_DEVVAR_STRINGARRAY : returned_data = insert ( ( String [ ] ) obj ) ; break ; case Tango_DEVVAR_LONGSTRINGARRAY : returned_data = insert ( ( DevVarLongStringArray ) obj ) ; break ; case Tango_DEVVAR_DOUBLESTRINGARRAY : returned_data = insert ( ( DevVarDoubleStringArray ) obj ) ; break ; case Tango_DEV_STATE : returned_data = insert ( ( DevState ) obj ) ; break ; } } catch ( InvocationTargetException e ) { throw ( DevFailed ) ( e . getTargetException ( ) ) ; } catch ( IllegalArgumentException e ) { StringBuffer mess = new StringBuffer ( "Argument error when trying to invoke method " ) ; mess . append ( exe_method ) ; Except . throw_exception ( "API_MethodArgument" , mess . 
toString ( ) , "TemplCommandOut.execute()" ) ; } catch ( IllegalAccessException e ) { StringBuffer mess = new StringBuffer ( "Argument error when trying to invoke method " ) ; mess . append ( exe_method ) ; Except . throw_exception ( "API_MethodArgument" , mess . toString ( ) , "TemplCommandOut.execute()" ) ; } // Return the Any
return returned_data ; |
public class GPXPoint { /** * Set the position dilution of precision of a point .
* @ param contentBuffer Contains the information to put in the table */
public final void setPdop ( StringBuilder contentBuffer ) { } } | ptValues [ GpxMetadata . PTPDOP ] = Double . parseDouble ( contentBuffer . toString ( ) ) ; |
public class FirstFitDecreasingPacking { /** * Sort the components in decreasing order based on their RAM requirements
* @ return The sorted list of components and their RAM requirements */
private List < ResourceRequirement > getSortedInstances ( Set < String > componentNames ) { } } | List < ResourceRequirement > resourceRequirements = new ArrayList < > ( ) ; for ( String componentName : componentNames ) { Resource requiredResource = this . componentResourceMap . getOrDefault ( componentName , defaultInstanceResources ) ; resourceRequirements . add ( new ResourceRequirement ( componentName , requiredResource . getRam ( ) , requiredResource . getCpu ( ) ) ) ; } Collections . sort ( resourceRequirements , sortingStrategy . reversed ( ) ) ; return resourceRequirements ; |
public class Link { /** * Create a ' curies ' link ( compact URI ) with name and a URI template for the link - relation type .
* Curies may be used for brevity for custom link - relation type URIs . Curiess are established within a HAL document
* via a set of Link Objects with the relation type " curies " on the root Resource Object .
* These links contain a URI template with the token ' rel ' , and are named via the " name " property .
* < pre > < code >
* " _ links " : {
* " self " : { " href " : " / orders " } ,
* " curies " : [ {
* " name " : " acme " ,
* " href " : " http : / / docs . acme . com / relations / { rel } " ,
* " templated " : true
* " acme : widgets " : { " href " : " / widgets " }
* < / code > < / pre >
* @ param name the short name of the CURI
* @ param relTemplate the template used to build link - relation types . Must contain a { rel } placeholder
* @ return Link
* @ see < a href = " http : / / www . iana . org / assignments / link - relations / link - relations . xhtml " > IANA link - relations < / a >
* @ see < a href = " https : / / tools . ietf . org / html / draft - kelly - json - hal - 08 # section - 8.2 " > draft - kelly - json - hal - 08 # section - 8.2 < / a >
* @ since 0.3.0 */
public static Link curi ( final String name , final String relTemplate ) { } } | if ( ! relTemplate . contains ( "{rel}" ) ) { throw new IllegalArgumentException ( "Not a CURI template. Template is required to contain a {rel} placeholder" ) ; } return new Link ( "curies" , relTemplate , null , null , null , name , null , null ) ; |
public class IO { /** * Load properties from a URL
* @ param url the URL of the properties file
* @ return
* the properties contains the content of the URL or an empty properties
* if the URL is invalid or null */
public static Properties loadProperties ( URL url ) { } } | if ( null == url ) { return new Properties ( ) ; } return loadProperties ( inputStream ( url ) ) ; |
public class Maps { /** * Check if the specified < code > Map < / code > contains the specified < code > Entry < / code >
* @ param map
* @ param entry
* @ return */
public static boolean contains ( final Map < ? , ? > map , final Map . Entry < ? , ? > entry ) { } } | return contains ( map , entry . getKey ( ) , entry . getValue ( ) ) ; |
public class JmxSecurity { /** * Example return encrypted ( . . . . . )
* @ param password String to be encrypted
* @ return String encrypted String */
public static char [ ] encrypt ( char [ ] password ) { } } | if ( password == null || password . length == 0 ) { return null ; } try { String encryptedString = null ; SecretKeySpec key = new SecretKeySpec ( init , "Blowfish" ) ; Cipher cipher = Cipher . getInstance ( CIPHER_INSTANCE ) ; cipher . init ( Cipher . ENCRYPT_MODE , key ) ; byte [ ] encrypted = cipher . doFinal ( String . valueOf ( password ) . getBytes ( Charset . forName ( "UTF8" ) ) ) ; encryptedString = byteArrayToHexString ( encrypted ) ; return encryptedString . toCharArray ( ) ; } catch ( Exception e ) { throw new SecurityException ( "Unable to encrypt password" , e ) ; } |
public class RouteNotFound { /** * Service a request .
* @ param request the http request facade
* @ param response the http response facade */
@ Override public boolean service ( RequestBaratine request ) { } } | request . status ( HttpStatus . NOT_FOUND ) ; request . write ( "Not found: " + request . uri ( ) + "\n" ) ; request . ok ( ) ; return true ; |
public class ArrayDeque { /** * Returns { @ code true } if this deque contains the specified element .
* More formally , returns { @ code true } if and only if this deque contains
* at least one element { @ code e } such that { @ code o . equals ( e ) } .
* @ param o object to be checked for containment in this deque
* @ return { @ code true } if this deque contains the specified element */
public boolean contains ( Object o ) { } } | if ( o != null ) { int mask = elements . length - 1 ; int i = head ; for ( Object x ; ( x = elements [ i ] ) != null ; i = ( i + 1 ) & mask ) { if ( o . equals ( x ) ) return true ; } } return false ; |
public class RandomVariableLowMemory { /** * / * ( non - Javadoc )
* @ see net . finmath . stochastic . RandomVariableInterface # expand ( ) */
public RandomVariableInterface expand ( int numberOfPaths ) { } } | if ( isDeterministic ( ) ) { // Expand random variable to a vector of path values
double [ ] clone = new double [ numberOfPaths ] ; java . util . Arrays . fill ( clone , valueIfNonStochastic ) ; return new RandomVariableLowMemory ( time , clone ) ; } return new RandomVariableLowMemory ( time , realizations . clone ( ) ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.