signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class RawMessage {
    /**
     * Set the expected content length of this message. The value comes from the
     * Content-Length header; if it exceeds the maximum allowed payload length a
     * {@link MaxMessageSizeExceededException} is thrown.
     *
     * @param contentLength the content length of the payload
     * @throws MaxMessageSizeExceededException in case the payload exceeds the
     *         maximum allowed length
     */
    public void setContentLength(final int contentLength) throws MaxMessageSizeExceededException {
        if (contentLength > this.maxAllowedContentLength) {
            throw new MaxMessageSizeExceededException("Content length exceeds the maximum allowed length");
        }
        this.contentLength = contentLength;
        // Pre-allocate the payload buffer for the announced length.
        this.payload = new byte[contentLength];
    }
}
public class MorfologikMultiSpeller {
    /**
     * Report whether the word is misspelled. The word counts as correctly
     * spelled (returns {@code false}) if at least one of the configured
     * dictionaries accepts it; only when every speller flags the word do we
     * return {@code true}.
     *
     * @param word the word to check
     * @return {@code true} if all spellers consider the word misspelled
     */
    public boolean isMisspelled(String word) {
        for (MorfologikSpeller speller : spellers) {
            if (!speller.isMisspelled(word)) {
                // One dictionary accepts the word, so it is not misspelled.
                return false;
            }
        }
        return true;
    }
}
public class PropertyImpl {
    /**
     * {@inheritDoc}
     *
     * Wraps the double in a repository {@code Value} via the value factory and
     * delegates to {@code setValue(Value)}.
     */
    public void setValue(double number) throws ValueFormatException, VersionException, LockException, ConstraintViolationException, RepositoryException {
        setValue(valueFactory.createValue(number));
    }
}
public class ST_TriangleAspect {
    /**
     * Compute the aspect (downslope direction) in degrees. The geometry must be
     * a triangle.
     *
     * @param geometry Polygon triangle
     * @return aspect in degrees measured from north; {@code null} for a null
     *         geometry, {@code 0} for an effectively horizontal triangle
     * @throws IllegalArgumentException ST_TriangleAspect accepts only triangles
     */
    public static Double computeAspect(Geometry geometry) throws IllegalArgumentException {
        if (geometry == null) {
            return null;
        }
        // Steepest-descent vector of the triangle's plane.
        Vector3D vector = TriMarkers.getSteepestVector(TriMarkers.getNormalVector(TINFeatureFactory.createTriangle(geometry)), TINFeatureFactory.EPSILON);
        if (vector.length() < TINFeatureFactory.EPSILON) {
            // Triangle is effectively flat: no meaningful aspect, return 0.
            return 0d;
        } else {
            // Project onto the horizontal plane and convert the angle to a
            // north-referenced bearing in degrees.
            Vector2D v = new Vector2D(vector.getX(), vector.getY());
            return measureFromNorth(Math.toDegrees(v.angle()));
        }
    }
}
public class MultiDbJDBCConnection {
    /**
     * {@inheritDoc}
     *
     * Inserts a value record: the data goes into column 1 as a binary stream,
     * or, when {@code stream} is null, column 4 carries the external storage
     * descriptor instead.
     */
    @Override
    protected int addValueData(String cid, int orderNumber, InputStream stream, int streamLength, String storageDesc) throws SQLException {
        // Lazily prepare the statement once, then reuse it across calls.
        if (insertValue == null) {
            insertValue = dbConnection.prepareStatement(INSERT_VALUE);
        } else {
            insertValue.clearParameters();
        }
        if (stream == null) {
            // [PN] store vd reference to external storage etc.
            insertValue.setNull(1, Types.BINARY);
            insertValue.setString(4, storageDesc);
        } else {
            insertValue.setBinaryStream(1, stream, streamLength);
            insertValue.setNull(4, Types.VARCHAR);
        }
        insertValue.setInt(2, orderNumber);
        insertValue.setString(3, cid);
        return insertValue.executeUpdate();
    }
}
public class JsApiHdrsImpl { /** * Return the derived JmsExpiration for a non - JMS / MQ produced message .
* The value is derived from the Timestamp and TimeToLive values .
* d336582 - now want to return 0 if Timestamp or TimeToLive are ' not set ' .
* @ return long The JmsExpiration value for the JMS message . */
final long getDerivedJmsExpiration ( ) { } } | long ts = getTimestamp ( ) . longValue ( ) ; long ttl = getTimeToLive ( ) . longValue ( ) ; if ( ( ts <= 0 ) || ( ttl <= 0 ) ) { return 0 ; } else { return ( ts + ttl ) ; } |
public class DataInputOutput {
    /**
     * Make sure there is enough space in the buffer to write {@code n} more
     * bytes at the current position, growing the buffer when needed.
     *
     * @param n number of bytes about to be written
     */
    private void ensureAvail(int n) {
        if (pos + n >= buf.length) {
            // At least double the buffer, or jump straight to pos + n when the
            // write is larger than a doubling would cover.
            int newSize = Math.max(pos + n, buf.length * 2);
            buf = Arrays.copyOf(buf, newSize);
        }
    }
}
public class BeanUtils {
    /**
     * Convert the properties of a bean that match the given access modifier
     * into a pretty-printed JSON string.
     *
     * @param object the bean instance to serialize
     * @param modifier access modifier of the properties to include
     * @return the formatted JSON string
     * @throws IllegalAccessException if a property cannot be read reflectively
     */
    public static String toPrettyJson(Object object, FieldModifier modifier) throws IllegalAccessException {
        return Formatter.formatJson(toJsonString(object, modifier));
    }
}
public class SimpleConfigProperties {
    /**
     * Converts a flat {@link Map} of configuration values to hierarchical
     * {@link ConfigProperties}. E.g. the flat map
     * <code>{"foo.bar.some" = "some-value", "foo.bar.other" = "other-value"}</code>
     * would result in {@link ConfigProperties} {@code myRoot} such that
     * <code>myRoot.{@link #getChild(String...) getChild}("foo", "bar").{@link #getChildKeys()}</code>
     * returns the {@link Collection} {"some", "other"} and
     * <code>myRoot.{@link #getChildValue(String) getValue}("foo.bar.some")</code>
     * returns "some-value".
     *
     * @param key the top-level key of the returned root {@link ConfigProperties}-node.
     *        Typically the empty string ("") for root.
     * @param map the flat {@link Map} of the configuration values.
     * @return the root {@link ConfigProperties}-node of the given flat
     *         {@link Map} converted to hierarchical {@link ConfigProperties}.
     */
    public static ConfigProperties ofFlatMap(String key, Map<String, String> map) {
        SimpleConfigProperties root = new SimpleConfigProperties(key);
        // Populate the hierarchy by splitting the flat keys on their separators.
        root.fromFlatMap(map);
        return root;
    }
}
public class OpenNlpHelper {
    /**
     * Convenience method to use the SentenceDetector directly; create the
     * detector with the factory method below.
     *
     * @param text the text to split into sentences
     * @param sd the sentence detector to use
     * @return list of sentence substrings; empty (possibly partial) on failure
     */
    public static List<String> splitSentence2(String text, SentenceDetector sd) {
        List<String> sentences = new ArrayList<String>();
        try {
            // NOTE(review): sentPosDetect here yields int offsets, presumably
            // the start position of each following sentence — confirm against
            // the SentenceDetector implementation in use.
            int sentenceOffsets[] = sd.sentPosDetect(text);
            int begin = 0;
            int end = 0;
            for (int i = 0; i < sentenceOffsets.length; i++) {
                // End of the current sentence = begin + trimmed length of the
                // span up to the next offset (drops trailing whitespace).
                end = begin + (text.substring(begin, sentenceOffsets[i]).trim()).length();
                sentences.add(text.substring(begin, end));
                begin = sentenceOffsets[i];
            }
        } catch (Exception e) {
            // Best-effort: log and return whatever was collected so far.
            LOG.warn("failed to extract sentences from text '" + text + "'", e);
        }
        return sentences;
    }
}
public class RedisInner {
    /**
     * Export data from the redis cache to blobs in a container.
     *
     * @param resourceGroupName The name of the resource group.
     * @param name The name of the Redis cache.
     * @param parameters Parameters for Redis export operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<Void> beginExportDataAsync(String resourceGroupName, String name, ExportRDBParameters parameters) {
        // Unwrap the ServiceResponse envelope, emitting only the (Void) body.
        return beginExportDataWithServiceResponseAsync(resourceGroupName, name, parameters).map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }
}
public class forwardingsession {
    /**
     * Use this API to fetch all the forwardingsession resources that are
     * configured on netscaler.
     *
     * @param service the nitro service used to issue the request
     * @return array of all configured forwardingsession resources
     * @throws Exception if the underlying API call fails
     */
    public static forwardingsession[] get(nitro_service service) throws Exception {
        forwardingsession obj = new forwardingsession();
        forwardingsession[] response = (forwardingsession[]) obj.get_resources(service);
        return response;
    }
}
public class ExpectedConditions {
    /**
     * An expectation for checking the title of a page.
     *
     * @param title the expected title, which must be an exact match
     * @return condition that evaluates to true when the title matches, false otherwise
     */
    public static ExpectedCondition<Boolean> titleIs(final String title) {
        return new ExpectedCondition<Boolean>() {
            // Last title observed; kept so toString() can report it on failure.
            private String currentTitle = "";

            @Override
            public Boolean apply(WebDriver driver) {
                currentTitle = driver.getTitle();
                return title.equals(currentTitle);
            }

            @Override
            public String toString() {
                return String.format("title to be \"%s\". Current title: \"%s\"", title, currentTitle);
            }
        };
    }
}
public class AlpineQueryManager {
    /**
     * Determines the effective permissions for the specified user by collecting
     * a List of all permissions assigned to the user either directly, or through
     * team membership.
     *
     * @param user the user to retrieve permissions for
     * @return a List of Permission objects (deduplicated, direct permissions first)
     * @since 1.1.0
     */
    public List<Permission> getEffectivePermissions(UserPrincipal user) {
        // LinkedHashSet dedupes while preserving insertion order.
        final LinkedHashSet<Permission> permissions = new LinkedHashSet<>();
        if (user.getPermissions() != null) {
            permissions.addAll(user.getPermissions());
        }
        if (user.getTeams() != null) {
            for (final Team team : user.getTeams()) {
                // Re-fetch the team by id to get its current permission list.
                final List<Permission> teamPermissions = getObjectById(Team.class, team.getId()).getPermissions();
                if (teamPermissions != null) {
                    permissions.addAll(teamPermissions);
                }
            }
        }
        return new ArrayList<>(permissions);
    }
}
public class MessageSerializer {
    /**
     * Serializes the request sent to the
     * {@link org.apache.flink.queryablestate.network.AbstractServerBase}.
     *
     * @param alloc The {@link ByteBufAllocator} used to allocate the buffer to
     *        serialize the message into.
     * @param requestId The id of the request to which the message refers to.
     * @param request The request to be serialized.
     * @return A {@link ByteBuf} containing the serialized message.
     */
    public static <REQ extends MessageBody> ByteBuf serializeRequest(final ByteBufAllocator alloc, final long requestId, final REQ request) {
        Preconditions.checkNotNull(request);
        return writePayload(alloc, requestId, MessageType.REQUEST, request.serialize());
    }
}
public class CreateClusterRequestMarshaller {
    /**
     * Marshall the given parameter object, binding each request field to its
     * protocol marshalling descriptor.
     *
     * @param createClusterRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving the field bindings
     */
    public void marshall(CreateClusterRequest createClusterRequest, ProtocolMarshaller protocolMarshaller) {
        if (createClusterRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(createClusterRequest.getSubnetIds(), SUBNETIDS_BINDING);
            protocolMarshaller.marshall(createClusterRequest.getHsmType(), HSMTYPE_BINDING);
            protocolMarshaller.marshall(createClusterRequest.getSourceBackupId(), SOURCEBACKUPID_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class IndexAVL {
    /**
     * Set a node as child of another, wiring the back-pointer as well.
     *
     * @param store the persistent store holding the nodes
     * @param x parent node
     * @param isleft true to attach as left child, false for right
     * @param n child node (may be null to clear the slot)
     * @return the (possibly replaced) parent node
     */
    private static NodeAVL set(PersistentStore store, NodeAVL x, boolean isleft, NodeAVL n) {
        if (isleft) {
            x = x.setLeft(store, n);
        } else {
            x = x.setRight(store, n);
        }
        if (n != null) {
            n.setParent(store, x);
        }
        return x;
    }
}
public class StringUtil {
    /**
     * Lower case the first letter of the given string.
     *
     * @param _str string, may be null or empty
     * @return the string with its first character lower-cased; null and empty
     *         inputs are returned unchanged
     */
    public static String lowerCaseFirstChar(String _str) {
        // Null and empty inputs pass straight through.
        if (_str == null || _str.isEmpty()) {
            return _str;
        }
        String head = _str.substring(0, 1).toLowerCase();
        String tail = _str.substring(1);
        return head + tail;
    }
}
public class TimeoutHeaderUtil {
    /**
     * Serialize the given timeout to a {@link String} header value: a
     * magnitude followed by a one-letter unit (n/u/m for nano-, micro-,
     * milliseconds; S/M/H for seconds, minutes, hours). The unit is chosen so
     * the magnitude stays below eight digits.
     *
     * @param timeoutNanos the timeout in nanoseconds; must be non-negative
     * @return the encoded header value
     * @throws IllegalArgumentException if the timeout is negative
     */
    public static String toHeaderValue(long timeoutNanos) {
        if (timeoutNanos < 0) {
            throw new IllegalArgumentException("Timeout too small");
        }
        // 10^8: largest magnitude we allow before moving to the next unit.
        final long limit = 100000000;
        if (timeoutNanos < limit) {
            return timeoutNanos + "n";
        }
        if (timeoutNanos < limit * 1000L) {
            return TimeUnit.NANOSECONDS.toMicros(timeoutNanos) + "u";
        }
        if (timeoutNanos < limit * 1000L * 1000L) {
            return TimeUnit.NANOSECONDS.toMillis(timeoutNanos) + "m";
        }
        if (timeoutNanos < limit * 1000L * 1000L * 1000L) {
            return TimeUnit.NANOSECONDS.toSeconds(timeoutNanos) + "S";
        }
        if (timeoutNanos < limit * 1000L * 1000L * 1000L * 60L) {
            return TimeUnit.NANOSECONDS.toMinutes(timeoutNanos) + "M";
        }
        return TimeUnit.NANOSECONDS.toHours(timeoutNanos) + "H";
    }
}
public class MountPointInfo {
    /**
     * <code>optional string ufsUri = 1;</code>
     *
     * Protobuf-generated accessor: if the field is still cached as a String,
     * convert it to a ByteString and cache the ByteString for later calls.
     */
    public com.google.protobuf.ByteString getUfsUriBytes() {
        java.lang.Object ref = ufsUri_;
        if (ref instanceof java.lang.String) {
            com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
            // Replace the cached String with its ByteString form.
            ufsUri_ = b;
            return b;
        } else {
            return (com.google.protobuf.ByteString) ref;
        }
    }
}
public class SsrcGenerator {
    /**
     * Combines four bytes (most significant byte first) into a 32 bit unsigned
     * integer.
     *
     * @param bytes the source array
     * @param index index of the most significant byte
     * @return long carrying the 32 bit unsigned integer
     */
    static long bytesToUIntLong(byte[] bytes, int index) {
        long result = 0;
        // Fold the four bytes in big-endian order; masking with a long literal
        // prevents sign extension of the high byte.
        for (int offset = 0; offset < 4; offset++) {
            result = (result << 8) | (bytes[index + offset] & 0xffL);
        }
        return result;
    }
}
public class gslbvserver {
    /**
     * Use this API to update gslbvserver. Copies every updatable field from
     * the supplied resource into a fresh request object and issues the update.
     *
     * @param client the nitro service used to issue the request
     * @param resource the gslbvserver carrying the values to apply
     * @return the API response of the update call
     * @throws Exception if the underlying API call fails
     */
    public static base_response update(nitro_service client, gslbvserver resource) throws Exception {
        // Field-by-field copy into a fresh request object (generated-style code).
        gslbvserver updateresource = new gslbvserver();
        updateresource.name = resource.name;
        updateresource.iptype = resource.iptype;
        updateresource.dnsrecordtype = resource.dnsrecordtype;
        updateresource.backupvserver = resource.backupvserver;
        updateresource.backupsessiontimeout = resource.backupsessiontimeout;
        updateresource.lbmethod = resource.lbmethod;
        updateresource.backuplbmethod = resource.backuplbmethod;
        updateresource.netmask = resource.netmask;
        updateresource.v6netmasklen = resource.v6netmasklen;
        updateresource.tolerance = resource.tolerance;
        updateresource.persistencetype = resource.persistencetype;
        updateresource.persistenceid = resource.persistenceid;
        updateresource.persistmask = resource.persistmask;
        updateresource.v6persistmasklen = resource.v6persistmasklen;
        updateresource.timeout = resource.timeout;
        updateresource.edr = resource.edr;
        updateresource.mir = resource.mir;
        updateresource.disableprimaryondown = resource.disableprimaryondown;
        updateresource.dynamicweight = resource.dynamicweight;
        updateresource.considereffectivestate = resource.considereffectivestate;
        updateresource.somethod = resource.somethod;
        updateresource.sopersistence = resource.sopersistence;
        updateresource.sopersistencetimeout = resource.sopersistencetimeout;
        updateresource.sothreshold = resource.sothreshold;
        updateresource.sobackupaction = resource.sobackupaction;
        updateresource.servicename = resource.servicename;
        updateresource.weight = resource.weight;
        updateresource.domainname = resource.domainname;
        updateresource.ttl = resource.ttl;
        updateresource.backupip = resource.backupip;
        updateresource.cookie_domain = resource.cookie_domain;
        updateresource.cookietimeout = resource.cookietimeout;
        updateresource.sitedomainttl = resource.sitedomainttl;
        updateresource.comment = resource.comment;
        updateresource.appflowlog = resource.appflowlog;
        return updateresource.update_resource(client);
    }
}
public class MiniProfilerServlet {
    /**
     * Generate the results for a set of requests in JSON format. Request ids
     * come from the "ids" query parameter (comma-separated); each id is looked
     * up in memcache and, when present, its profile data (plus optional
     * Appstats data) is included in the response.
     *
     * @param req the incoming request carrying the "ids" parameter
     * @param resp the response to which the JSON payload is written
     * @throws IOException if writing the response fails
     */
    private void doResults(HttpServletRequest req, HttpServletResponse resp) throws IOException, JsonGenerationException, JsonMappingException {
        Map<String, Object> result = new HashMap<String, Object>();
        String requestIds = req.getParameter("ids");
        if (!isEmpty(requestIds)) {
            List<Map<String, Object>> requests = new ArrayList<Map<String, Object>>();
            for (String requestId : requestIds.split(",")) {
                requestId = requestId.trim();
                // Profiling data for this request, as stored by the filter.
                @SuppressWarnings("unchecked")
                Map<String, Object> requestData = (Map<String, Object>) ms.get(String.format(MiniProfilerFilter.MEMCACHE_KEY_FORMAT_STRING, requestId));
                if (requestData != null) {
                    Map<String, Object> request = new HashMap<String, Object>();
                    request.put("id", requestId);
                    request.put("redirect", requestData.get("redirect"));
                    request.put("requestURL", requestData.get("requestURL"));
                    request.put("timestamp", requestData.get("timestamp"));
                    request.put("profile", requestData.get("profile"));
                    if (requestData.containsKey("appstatsId")) {
                        // Resolve the linked Appstats record, if any.
                        Map<String, Object> appstatsMap = MiniProfilerAppstats.getAppstatsDataFor((String) requestData.get("appstatsId"), maxStackFrames);
                        request.put("appstats", appstatsMap != null ? appstatsMap : null);
                    } else {
                        request.put("appstats", null);
                    }
                    requests.add(request);
                }
            }
            result.put("ok", true);
            result.put("requests", requests);
        } else {
            result.put("ok", false);
        }
        resp.setContentType("application/json");
        resp.setHeader("Cache-Control", "no-cache");
        ObjectMapper jsonMapper = new ObjectMapper();
        jsonMapper.writeValue(resp.getOutputStream(), result);
    }
}
public class Smb2CreateResponse {
    /**
     * {@inheritDoc}
     *
     * Propagates the file id produced by this create response to the next
     * chained request, when that request expects one.
     *
     * @see jcifs.internal.smb2.ServerMessageBlock2Response#prepare(jcifs.internal.CommonServerMessageBlockRequest)
     */
    @Override
    public void prepare(CommonServerMessageBlockRequest next) {
        if (isReceived() && (next instanceof RequestWithFileId)) {
            ((RequestWithFileId) next).setFileId(this.fileId);
        }
        super.prepare(next);
    }
}
public class CSSReader {
    /**
     * Check if the passed CSS resource can be parsed without error.
     *
     * @param aRes
     *        The resource to be parsed. May not be <code>null</code>.
     * @param aFallbackCharset
     *        The charset to be used for reading the CSS file in case neither a
     *        <code>@charset</code> rule nor a BOM is present. May not be
     *        <code>null</code>.
     * @param eVersion
     *        The CSS version to be used for scanning. May not be
     *        <code>null</code>.
     * @return <code>true</code> if the file can be parsed without error,
     *         <code>false</code> if not
     */
    public static boolean isValidCSS(@Nonnull final IReadableResource aRes, @Nonnull final Charset aFallbackCharset, @Nonnull final ECSSVersion eVersion) {
        ValueEnforcer.notNull(aRes, "Resource");
        ValueEnforcer.notNull(aFallbackCharset, "FallbackCharset");
        ValueEnforcer.notNull(eVersion, "Version");
        final Reader aReader = aRes.getReader(aFallbackCharset);
        if (aReader == null) {
            // Resource could not be opened at all.
            LOGGER.warn("Failed to open CSS reader " + aRes);
            return false;
        }
        // NOTE(review): presumably the Reader-based isValidCSS overload closes
        // the reader; confirm, otherwise this leaks on every call.
        return isValidCSS(aReader, eVersion);
    }
}
public class LdapTemplate {
    /**
     * {@inheritDoc}
     *
     * Builds the final filter for the target class, restricts the returned
     * attributes to those the ODM manages, runs the search, and maps each
     * entry to an instance of {@code clazz}.
     */
    @Override
    public <T> List<T> find(Name base, Filter filter, SearchControls searchControls, final Class<T> clazz) {
        Filter finalFilter = odm.filterFor(clazz, filter);
        // Search from the root if we are not told where to search from
        Name localBase = base;
        if (base == null || base.size() == 0) {
            localBase = LdapUtils.emptyLdapName();
        }
        // extend search controls with the attributes to return
        String[] attributes = odm.manageClass(clazz);
        searchControls.setReturningAttributes(attributes);
        if (LOG.isDebugEnabled()) {
            LOG.debug(String.format("Searching - base=%1$s, finalFilter=%2$s, scope=%3$s", base, finalFilter, searchControls));
        }
        List<T> result = search(localBase, finalFilter.encode(), searchControls, new ContextMapper<T>() {
            @Override
            public T mapFromContext(Object ctx) throws javax.naming.NamingException {
                return odm.mapFromLdapDataEntry((DirContextOperations) ctx, clazz);
            }
        });
        // Drop entries the mapper could not convert.
        result.remove(null);
        if (LOG.isDebugEnabled()) {
            LOG.debug(String.format("Found %1$s Entries - %2$s", result.size(), result));
        }
        return result;
    }
}
public class EncryptUtil { /** * Encode a string using algorithm specified in web . xml and return the
* resulting encrypted password . If exception , the plain credentials string
* is returned
* @ param password passord string
* @ param algorithm Algorithm used to do the digest
* @ return encypted password based on the algorithm . */
public static String encode ( String password , String algorithm ) { } } | byte [ ] unencodedPassword = password . getBytes ( ) ; MessageDigest md = null ; try { // first create an instance , given the provider
md = MessageDigest . getInstance ( algorithm ) ; } catch ( Exception e ) { logger . error ( "Exception:{}" , e ) ; return password ; } md . reset ( ) ; // call the update method one or more times
// ( useful when you don ' t know the size of your data , eg . stream )
md . update ( unencodedPassword ) ; // now calculate the hash
byte [ ] encodedPassword = md . digest ( ) ; StringBuilder buf = new StringBuilder ( ) ; for ( int i = 0 ; i < encodedPassword . length ; i ++ ) { if ( ( encodedPassword [ i ] & 0xff ) < 0x10 ) buf . append ( "0" ) ; buf . append ( Long . toString ( encodedPassword [ i ] & 0xff , 16 ) ) ; } return buf . toString ( ) ; |
public class PrimaveraXERFileReader {
    /**
     * Populates a Map instance representing the IDs and names of
     * projects available in the current file.
     *
     * @param is input stream used to read XER file
     * @return Map instance containing ID and name pairs
     * @throws MPXJException
     */
    public Map<Integer, String> listProjects(InputStream is) throws MPXJException {
        try {
            m_tables = new HashMap<String, List<Row>>();
            processFile(is);
            Map<Integer, String> result = new HashMap<Integer, String>();
            List<Row> rows = getRows("project", null, null);
            for (Row row : rows) {
                Integer id = row.getInteger("proj_id");
                String name = row.getString("proj_short_name");
                result.put(id, name);
            }
            return result;
        } finally {
            // Release parser state so the reader can be reused / collected.
            m_tables = null;
            m_currentTable = null;
            m_currentFieldNames = null;
        }
    }
}
public class EdmondsMaximumMatching {
    /**
     * Creates the blossom 'supports' for the specified blossom 'bridge' edge
     * (v, w). We travel down each side to the base of the blossom ('base')
     * collapsing vertices and point any 'odd' vertices to the correct 'bridge'
     * edge. We do this by indexing the bridge to each vertex in the 'bridges'
     * map.
     *
     * @param v an endpoint of the blossom bridge
     * @param w another endpoint of the blossom bridge
     * @param base the base of the blossom
     * @return the vertices on this side's path, collapsed via the DSF roots
     */
    private int[] blossomSupports(int v, int w, int base) {
        int n = 0;
        path[n++] = dsf.getRoot(v);
        Tuple b = new Tuple(v, w);
        while (path[n - 1] != base) {
            // Step from the current (even) vertex to its odd predecessor.
            int u = even[path[n - 1]];
            path[n++] = u;
            this.bridges.put(u, b);
            // contracting the blossom allows us to continue searching from odd
            // vertices (any odd vertices are now even - part of the blossom set)
            queue.add(u);
            path[n++] = dsf.getRoot(odd[u]);
        }
        return Arrays.copyOf(path, n);
    }
}
public class DecompressionInterceptor {
    /**
     * {@inheritDoc}
     *
     * Decompresses the HTTP response body when a content-encoding is present;
     * otherwise reads the body as plain text. In both cases the decoded data
     * is stored back on the response elements for downstream interceptors.
     */
    @Override
    public void execute(IntuitMessage intuitMessage) throws FMSException {
        LOG.debug("Enter DecompressInterceptor...");
        ResponseElements responseElements = intuitMessage.getResponseElements();
        // get the Header to check whether it has content-encoding.
        // Header encodingHeader = responseElements.getEncodingHeader();
        String contentEncoding = responseElements.getEncodingHeader();
        String decompressedData = null;
        boolean isCompressionEnabled = false;
        if (contentEncoding != null) {
            // if content-encoding exists then decompress the response data
            // String contentEncoding = encodingHeader.getValue();
            if (StringUtils.hasText(contentEncoding)) {
                isCompressionEnabled = true;
                LOG.info("compression format : " + contentEncoding);
                LOG.debug("compression format : " + contentEncoding);
                // get the compressor to decompress the response data
                ICompressor compressor = CompressorFactory.getCompressor(contentEncoding);
                try {
                    ByteArrayOutputStream bytes = (ByteArrayOutputStream) compressor.decompress(responseElements.getResponseContent());
                    responseElements.setResponseBytes(new ByteArrayInputStream(bytes.toByteArray()));
                    decompressedData = new String(bytes.toByteArray());
                } catch (IllegalStateException e) {
                    LOG.error("IllegalStateException while get the content from HttpResponse.", e);
                    throw new FMSException(e);
                } catch (Exception e) {
                    LOG.error("IOException in DecompressInterceptor.", e);
                    throw new FMSException(e);
                }
            }
        }
        if (!isCompressionEnabled) {
            // if content-encoding or compression format does not exist
            BufferedReader br = null;
            String readLine = null;
            StringBuilder responseBody = new StringBuilder();
            try {
                br = new BufferedReader(new InputStreamReader(responseElements.getResponseContent()));
                // get the response body received from socket connection
                while (((readLine = br.readLine()) != null)) {
                    responseBody.append(readLine).append(System.getProperty("line.separator"));
                }
            } catch (IllegalStateException e) {
                LOG.error("IllegalStateException while get the content from HttpResponse.", e);
                throw new FMSException(e);
            } catch (Exception e) {
                LOG.error("IOException in DecompressInterceptor.", e);
                throw new FMSException(e);
            } finally {
                if (br != null) {
                    try {
                        br.close();
                    } catch (Exception e) {
                        LOG.warn("Unable to close BufferedReader", e);
                    }
                }
            }
            try {
                // Rewind the stream so downstream consumers can re-read it.
                responseElements.getResponseContent().reset();
            } catch (IOException ex) {
                LOG.warn("Unable to reset ResponseContent for bytes without compression", ex);
            }
            decompressedData = responseBody.toString();
            responseElements.setResponseBytes(responseElements.getResponseContent());
        }
        LOG.debug("Decompressed Response Body : " + decompressedData);
        LOG.debug("Exit DecompressInterceptor.");
        responseElements.setDecompressedData(decompressedData);
    }
}
public class MPPUtility {
    /**
     * Reads a time value. The time is represented as tenths of a
     * minute since midnight.
     *
     * @param data byte array of data
     * @param offset location of data as offset into the array
     * @return time value
     */
    public static final Date getTime(byte[] data, int offset) {
        // Raw value is tenths of a minute; convert to whole minutes.
        int time = getShort(data, offset) / 10;
        Calendar cal = DateHelper.popCalendar(EPOCH_DATE);
        cal.set(Calendar.HOUR_OF_DAY, (time / 60));
        cal.set(Calendar.MINUTE, (time % 60));
        cal.set(Calendar.SECOND, 0);
        cal.set(Calendar.MILLISECOND, 0);
        // Return the pooled calendar before handing back the Date.
        DateHelper.pushCalendar(cal);
        return (cal.getTime());
    }
}
public class MySQLMetadataDAO {
    /**
     * Retrieve a {@link EventHandler} by {@literal name}.
     *
     * @param connection The {@link Connection} to use for queries.
     * @param name The {@code EventHandler} name to look for.
     * @return {@literal null} if nothing is found, otherwise the {@code EventHandler}.
     */
    private EventHandler getEventHandler(Connection connection, String name) {
        final String READ_ONE_EVENT_HANDLER_QUERY = "SELECT json_data FROM meta_event_handler WHERE name = ?";
        // Parameterized query; first matching row is deserialized from JSON.
        return query(connection, READ_ONE_EVENT_HANDLER_QUERY, q -> q.addParameter(name).executeAndFetchFirst(EventHandler.class));
    }
}
public class LambdaToMethod {
    /**
     * Create new synthetic variable with given flags, name, type, owner.
     * The SYNTHETIC flag is always added on top of the supplied flags.
     *
     * @param flags modifier flags for the variable
     * @param name the variable name
     * @param type the variable type
     * @param owner the owning symbol
     * @return the new synthetic variable symbol
     */
    private VarSymbol makeSyntheticVar(long flags, Name name, Type type, Symbol owner) {
        return new VarSymbol(flags | SYNTHETIC, name, type, owner);
    }
}
public class InternalXbaseParser {
    /**
     * ANTLR-generated entry rule; do not edit by hand.
     *
     * InternalXbase.g:283:1: entryRuleOpEquality : ruleOpEquality EOF ;
     */
    public final void entryRuleOpEquality() throws RecognitionException {
        try {
            // InternalXbase.g:284:1: ( ruleOpEquality EOF )
            // InternalXbase.g:285:1: ruleOpEquality EOF
            {
                if (state.backtracking == 0) {
                    before(grammarAccess.getOpEqualityRule());
                }
                pushFollow(FOLLOW_1);
                ruleOpEquality();
                state._fsp--;
                if (state.failed) return;
                if (state.backtracking == 0) {
                    after(grammarAccess.getOpEqualityRule());
                }
                match(input, EOF, FOLLOW_2);
                if (state.failed) return;
            }
        } catch (RecognitionException re) {
            reportError(re);
            recover(input, re);
        } finally {
        }
        return;
    }
}
public class TokensApi {
    /**
     * Check Token.
     * (Deprecated) Check Token. See tokenInfo.
     *
     * @param tokenInfo Token object to be checked (required)
     * @return CheckTokenResponse
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public CheckTokenResponse checkToken(TokenRequest tokenInfo) throws ApiException {
        // Delegate to the HTTP-info variant and unwrap the payload.
        ApiResponse<CheckTokenResponse> resp = checkTokenWithHttpInfo(tokenInfo);
        return resp.getData();
    }
}
public class ByteUtil {
    /**
     * Convert four bytes to an int, high (most significant) byte first — the
     * inverse of a writeInt-style big-endian encoding.
     *
     * @param bytes the source byte array
     * @param start index of the high-order byte
     * @return the decoded int
     */
    public static int bytesHighFirstToInt(byte[] bytes, int start) {
        int value = 0;
        // Shift-and-or the four bytes in big-endian order.
        for (int i = 0; i < 4; i++) {
            value = (value << 8) | (bytes[start + i] & 0xFF);
        }
        return value;
    }
}
public class TocTreeBuilder {
    /**
     * Create a new toc list item based on section title.
     *
     * @param headerBlock the {@link HeaderBlock}.
     * @param documentReference reference of the document containing the header
     * @return the new list item block.
     */
    private ListItemBlock createTocEntry(HeaderBlock headerBlock, String documentReference) {
        // Create the link to target the header anchor
        DocumentResourceReference reference = new DocumentResourceReference(documentReference);
        reference.setAnchor(headerBlock.getId());
        LinkBlock linkBlock = new LinkBlock(this.tocBlockFilter.generateLabel(headerBlock), reference, false);
        return new ListItemBlock(Collections.singletonList(linkBlock));
    }
}
public class CXIChunk {
    /**
     * Extract the integer value stored at a (byte) offset, reading
     * {@code _valsz} bytes located just past the row-id field.
     *
     * @param off byte offset of the entry within the backing memory
     * @return the decoded value, widened to long
     */
    protected final long getIValue(int off) {
        switch (_valsz) {
            case 1:
                return _mem[off + _ridsz] & 0xFF;
            case 2:
                return UDP.get2(_mem, off + _ridsz);
            case 4:
                return UDP.get4(_mem, off + _ridsz);
            case 8:
                return UDP.get8(_mem, off + _ridsz);
            default:
                // Unsupported value width.
                throw H2O.unimpl();
        }
    }
}
public class RuleSet {
    /**
     * Adds the rule instances: object creation, parent wiring and property
     * binding for the profiles/profile/setting XML structure.
     *
     * @param digester the digester
     * @see org.apache.commons.digester3.RuleSetBase#addRuleInstances(org.apache.commons.digester3.Digester)
     */
    @Override
    public void addRuleInstances(Digester digester) {
        digester.addObjectCreate("profiles", Profiles.class);
        digester.addObjectCreate(PROFILES_PROFILE, Profile.class);
        digester.addObjectCreate(PROFILES_PROFILE_SETTING, Setting.class);
        digester.addSetNext(PROFILES_PROFILE, "addProfile");
        digester.addSetNext(PROFILES_PROFILE_SETTING, "addSetting");
        digester.addSetProperties(PROFILES_PROFILE, "kind", "kind");
        digester.addSetProperties(PROFILES_PROFILE_SETTING, "id", "id");
        digester.addSetProperties(PROFILES_PROFILE_SETTING, "value", "value");
    }
}
public class WebFacesPartialResponseDescriptorImpl {
    /**
     * If not already created, a new <code>error</code> element with the given value will be created.
     * Otherwise, the existing <code>error</code> element will be returned.
     *
     * @return a new or existing instance of <code>PartialResponseErrorType&lt;WebFacesPartialResponseDescriptor&gt;</code>
     */
    public PartialResponseErrorType<WebFacesPartialResponseDescriptor> getOrCreateError() {
        // getOrCreate returns the existing node or appends a new one.
        Node node = model.getOrCreate("error");
        PartialResponseErrorType<WebFacesPartialResponseDescriptor> error = new PartialResponseErrorTypeImpl<WebFacesPartialResponseDescriptor>(this, "error", model, node);
        return error;
    }
}
public class AttributeConstraintRule {
    /**
     * Validates the given value against a {@link DecimalMax} constraint: when the
     * value's type is a supported decimal type, both the value and the annotation
     * maximum are converted to {@link BigDecimal} and compared; a validation
     * exception is raised when the value exceeds the maximum.
     *
     * @param validationObject the value to validate; {@code null} is treated as valid
     * @param annotate the {@link DecimalMax} annotation carrying the maximum and message
     * @return {@code true} when no validation exception is thrown
     * @throws RuleValidationException when the value or the maximum cannot be parsed as a number
     */
    private boolean validateMaxDecimal(Object validationObject, Annotation annotate) {
        if (validationObject != null) {
            try {
                // only compare types known to represent decimal digits
                if (checkvalidDeciDigitTypes(validationObject.getClass())) {
                    BigDecimal maxValue = NumberUtils.createBigDecimal(((DecimalMax) annotate).value());
                    BigDecimal actualValue = NumberUtils.createBigDecimal(toString(validationObject));
                    int res = actualValue.compareTo(maxValue);
                    if (res > 0) {
                        // value exceeds the configured maximum
                        throwValidationException(((DecimalMax) annotate).message());
                    }
                }
            } catch (NumberFormatException nfe) {
                // NOTE(review): the original NumberFormatException cause is dropped here;
                // consider passing nfe as the cause if RuleValidationException supports it
                throw new RuleValidationException(nfe.getMessage());
            }
        }
        return true;
    }
}
public class PojoDescriptorBuilderImpl {
    /**
     * Creates the {@link PojoDescriptorImpl} for the {@link Class#getSuperclass() superclass}
     * of {@code pojoClass} and merges inherited {@link PojoPropertyDescriptorImpl}s into the
     * given descriptor: properties present only on the super-class are copied as-is,
     * properties present on both are merged.
     *
     * @param <P> is the generic type of {@code pojoClass}.
     * @param pojoClass is the {@link Class} for the {@link PojoDescriptorImpl#getPojoType() pojo type}.
     * @param descriptor is the {@link PojoDescriptorImpl} to merge.
     */
    private <P> void mergeDescriptorWithSuperClass(Class<P> pojoClass, PojoDescriptorImpl<P> descriptor) {
        // create descriptor for super-class to reuse information (#55)
        Class<? super P> superClass = pojoClass.getSuperclass();
        PojoDescriptorImpl<?> superDescriptor = null;
        if (superClass != null) {
            // resolve the super type in the context of the concrete pojo type so generics stay bound
            GenericType<?> superType = getReflectionUtil().createGenericType(superClass, descriptor.getPojoType());
            superDescriptor = createDescriptor(superType);
            for (PojoPropertyDescriptorImpl superPropertyDescriptor : superDescriptor.getPropertyDescriptors()) {
                PojoPropertyDescriptorImpl propertyDescriptor = descriptor.getPropertyDescriptor(superPropertyDescriptor.getName());
                if (propertyDescriptor == null) {
                    // property exists only on the super-class: inherit it unchanged
                    descriptor.addPropertyDescriptor(superPropertyDescriptor);
                } else {
                    // property is redeclared: merge the inherited accessors into it
                    mergePropertyDescriptorWithSuperClass(propertyDescriptor, superPropertyDescriptor);
                }
            }
        }
    }
}
public class JStormMetrics {
    /**
     * Converts all registered metric snapshots to thrift objects. Timestamps are
     * aligned to the minute during the conversion, so the nimbus server receives
     * snapshots with aligned timestamps (still in ms as TDDL will use it).
     *
     * @return the thrift {@code MetricInfo} holding all enabled, non-empty snapshots
     */
    public static MetricInfo computeAllMetrics() {
        long start = System.currentTimeMillis();
        MetricInfo metricInfo = MetricUtils.mkMetricInfo();
        // gather metric entries from every registry; stream metrics only when enabled
        List<Map.Entry<String, AsmMetric>> entries = Lists.newLinkedList();
        if (enableStreamMetrics) {
            entries.addAll(streamMetrics.metrics.entrySet());
        }
        entries.addAll(taskMetrics.metrics.entrySet());
        entries.addAll(componentMetrics.metrics.entrySet());
        entries.addAll(compStreamMetrics.metrics.entrySet());
        entries.addAll(workerMetrics.metrics.entrySet());
        entries.addAll(nettyMetrics.metrics.entrySet());
        entries.addAll(topologyMetrics.metrics.entrySet());
        for (Map.Entry<String, AsmMetric> entry : entries) {
            String name = entry.getKey();
            AsmMetric metric = entry.getValue();
            // skip disabled metrics, double check
            if (disabledMetricNames.contains(metric.getShortName())) {
                continue;
            }
            Map<Integer, AsmSnapshot> snapshots = metric.getSnapshots();
            if (snapshots.size() == 0) {
                continue;
            }
            int op = metric.getOp();
            // LOG op bit: print the snapshot locally
            if ((op & AsmMetric.MetricOp.LOG) == AsmMetric.MetricOp.LOG) {
                MetricUtils.printMetricSnapshot(metric, snapshots);
            }
            // REPORT op bit: convert to thrift and add to the result, by metric kind
            if ((op & AsmMetric.MetricOp.REPORT) == AsmMetric.MetricOp.REPORT) {
                MetaType metaType = MetricUtils.metaType(metric.getMetricName());
                try {
                    if (metric instanceof AsmCounter) {
                        Map data = MetricUtils.toThriftCounterSnapshots(snapshots);
                        putIfNotEmpty(metricInfo.get_metrics(), name, data);
                    } else if (metric instanceof AsmGauge) {
                        Map data = MetricUtils.toThriftGaugeSnapshots(snapshots);
                        putIfNotEmpty(metricInfo.get_metrics(), name, data);
                    } else if (metric instanceof AsmMeter) {
                        Map data = MetricUtils.toThriftMeterSnapshots(snapshots);
                        putIfNotEmpty(metricInfo.get_metrics(), name, data);
                    } else if (metric instanceof AsmHistogram) {
                        Map data = MetricUtils.toThriftHistoSnapshots(metaType, snapshots);
                        putIfNotEmpty(metricInfo.get_metrics(), name, data);
                    }
                } catch (Exception ex) {
                    // a single bad metric must not abort the whole conversion
                    LOG.error("Error", ex);
                }
            }
        }
        if (debug) {
            MetricUtils.printMetricInfo(metricInfo, debugMetricNames);
        }
        LOG.debug("compute all metrics, cost:{}", System.currentTimeMillis() - start);
        return metricInfo;
    }
}
public class MigrationServiceRestAdapter { /** * TODO placeholder for future implementation
* Migrates a single bounce proxy to a different cluster .
* @ param bpId bounce proxy id
* @ return response with status 501 ( not implemented ) */
@ DELETE @ Path ( "/bps/{bpid}" ) public Response migrateBounceProxy ( @ PathParam ( "bpid" ) String bpId ) { } } | return Response . status ( 501 /* Not Implemented */
) . build ( ) ; |
public class TCPMemcachedNodeImpl { /** * ( non - Javadoc )
* @ see net . spy . memcached . MemcachedNode # writeSome ( ) */
public final int writeSome ( ) throws IOException { } } | int wrote = channel . write ( wbuf ) ; assert wrote >= 0 : "Wrote negative bytes?" ; toWrite -= wrote ; assert toWrite >= 0 : "toWrite went negative after writing " + wrote + " bytes for " + this ; getLogger ( ) . debug ( "Wrote %d bytes" , wrote ) ; return wrote ; |
public class SimpleIOHandler {
    /**
     * Writes the XML representation of an individual BioPAX element. The output
     * is BioPAX-like but intended only for display or debugging (incomplete).
     * Note: use {@link BioPAXIOHandler#convertToOWL(org.biopax.paxtools.model.Model, java.io.OutputStream)}
     * instead if you have a model and want to save and later restore it.
     *
     * @param out output writer
     * @param bean BioPAX object to serialize
     * @throws IOException when the output writer throws
     */
    public void writeObject(Writer out, BioPAXElement bean) throws IOException {
        // element name is the simple interface name in the bp: namespace
        String name = "bp:" + bean.getModelInterface().getSimpleName();
        writeIDLine(out, bean, name);
        Set<PropertyEditor> editors = editorMap.getEditorsOf(bean);
        if (editors == null || editors.isEmpty()) {
            // nothing to write for this element: close it immediately
            log.info("no editors for " + bean.getUri() + " | " + bean.getModelInterface().getSimpleName());
            out.write(newline + "</" + name + ">");
            return;
        }
        for (PropertyEditor editor : editors) {
            Set value = editor.getValueFromBean(bean); // is never null
            for (Object valueElement : value) {
                // skip "unknown" placeholder values
                if (!editor.isUnknown(valueElement))
                    writeStatementFor(bean, editor, valueElement, out);
            }
        }
        out.write(newline + "</" + name + ">");
    }
}
public class PeriodUtil { /** * Converts a period string to a time .
* < table >
* < tr > < td > ms < td > milliseconds
* < tr > < td > s < td > seconds
* < tr > < td > m < td > minutes
* < tr > < td > h < td > hours
* < tr > < td > D < td > days
* < tr > < td > W < td > weeks
* < tr > < td > M < td > months
* < tr > < td > Y < td > years
* < / table > */
public static long toPeriod ( String value , long defaultUnits ) throws ConfigException { } } | if ( value == null ) return 0 ; long sign = 1 ; long period = 0 ; int i = 0 ; int length = value . length ( ) ; if ( length > 0 && value . charAt ( i ) == '-' ) { sign = - 1 ; i ++ ; } while ( i < length ) { long delta = 0 ; char ch ; for ( ; i < length && ( ch = value . charAt ( i ) ) >= '0' && ch <= '9' ; i ++ ) delta = 10 * delta + ch - '0' ; if ( length <= i ) period += defaultUnits * delta ; else { ch = value . charAt ( i ++ ) ; switch ( ch ) { case 's' : period += 1000 * delta ; break ; case 'm' : if ( i < value . length ( ) && value . charAt ( i ) == 's' ) { i ++ ; period += delta ; } else period += 60 * 1000 * delta ; break ; case 'h' : period += 60L * 60 * 1000 * delta ; break ; case 'D' : period += DAY * delta ; break ; case 'W' : period += 7L * DAY * delta ; break ; case 'M' : period += 30L * DAY * delta ; break ; case 'Y' : period += 365L * DAY * delta ; break ; default : throw new ConfigException ( L . l ( "Unknown unit `{0}' in period `{1}'. Valid units are:\n '10ms' milliseconds\n '10s' seconds\n '10m' minutes\n '10h' hours\n '10D' days\n '10W' weeks\n '10M' months\n '10Y' years" , String . valueOf ( ch ) , value ) ) ; } } } period = sign * period ; // server / 137w
/* if ( period < 0)
return INFINITE ;
else
return period ; */
return period ; |
public class IOUtil {
    /**
     * Closes any object that exposes a close method, suppressing errors.
     * Known stream/JDBC/zip types are dispatched to their typed overloads;
     * anything else is closed reflectively via a public no-arg close() method.
     *
     * NOTE: the instanceof order is deliberate - the concrete stream types are
     * checked before the generic {@code Closeable} fallback so they reach their
     * specific overloads.
     *
     * @param obj the object to close; silently ignored when closing fails
     */
    public static void closeEL(Object obj) {
        if (obj instanceof InputStream) IOUtil.closeEL((InputStream) obj);
        else if (obj instanceof OutputStream) IOUtil.closeEL((OutputStream) obj);
        else if (obj instanceof Writer) IOUtil.closeEL((Writer) obj);
        else if (obj instanceof Reader) IOUtil.closeEL((Reader) obj);
        else if (obj instanceof Closeable) IOUtil.closeEL((Closeable) obj);
        else if (obj instanceof ZipFile) IOUtil.closeEL((ZipFile) obj);
        else if (obj instanceof ResultSet) IOUtil.closeEL((ResultSet) obj);
        else if (obj instanceof Connection) IOUtil.closeEL((Connection) obj);
        else {
            // fall back to reflection for arbitrary objects that declare close()
            try {
                Method method = obj.getClass().getMethod("close", new Class[0]);
                method.invoke(obj, new Object[0]);
            } catch (Throwable e) {
                ExceptionUtil.rethrowIfNecessary(e);
            }
        }
    }
}
public class CssFormatter { /** * Parse the parameter rewrite URL
* @ return 0 , 1 or 2 */
private int parseRewriteUrl ( ) { } } | String rewrite = options . get ( Less . REWRITE_URLS ) ; if ( rewrite != null ) { switch ( rewrite . toLowerCase ( ) ) { case "off" : return 0 ; case "local" : return 1 ; case "all" : return 2 ; } } return 0 ; |
public class DataLabelingServiceClient {
    /**
     * Imports data into a dataset based on source locations defined in the request.
     * It can be called multiple times for the same dataset. Each dataset can only
     * have one long-running operation running on it - for example, no labeling task
     * (also a long-running operation) can be started while importing is still
     * ongoing, and vice versa.
     *
     * <p>Sample code:
     *
     * <pre><code>
     * try (DataLabelingServiceClient dataLabelingServiceClient = DataLabelingServiceClient.create()) {
     *   String formattedName = DataLabelingServiceClient.formatDatasetName("[PROJECT]", "[DATASET]");
     *   InputConfig inputConfig = InputConfig.newBuilder().build();
     *   ImportDataRequest request = ImportDataRequest.newBuilder()
     *     .setName(formattedName)
     *     .setInputConfig(inputConfig)
     *     .build();
     *   ImportDataOperationResponse response = dataLabelingServiceClient.importDataAsync(request).get();
     * }
     * </code></pre>
     *
     * @param request The request object containing all of the parameters for the API call.
     * @return a future tracking the long-running import operation
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    @BetaApi("The surface for long-running operations is not stable yet and may change in the future.")
    public final OperationFuture<ImportDataOperationResponse, ImportDataOperationMetadata> importDataAsync(ImportDataRequest request) {
        // delegate to the generated operation callable
        return importDataOperationCallable().futureCall(request);
    }
}
public class PathResolver { /** * Similar to { @ link Path # relativize ( Path ) } except that :
* < ul >
* < li > Empty is returned if file is not a child of dir
* < li > the resulting path is converted to use Unix separators
* < / ul >
* @ since 6.6 */
public static Optional < String > relativize ( Path dir , Path file ) { } } | Path baseDir = dir . normalize ( ) ; Path path = file . normalize ( ) ; if ( ! path . startsWith ( baseDir ) ) { return Optional . empty ( ) ; } try { Path relativized = baseDir . relativize ( path ) ; return Optional . of ( FilenameUtils . separatorsToUnix ( relativized . toString ( ) ) ) ; } catch ( IllegalArgumentException e ) { return Optional . empty ( ) ; } |
public class CommandBuilder { /** * Finds similar option by name or short name
* @ param option to provide info
* @ return found option or null if none found */
private CommandOption find ( CommandOption option ) { } } | Assert . notNull ( option , "Missing option!" ) ; Optional < CommandOption > found = options . stream ( ) . filter ( o -> o . is ( option ) ) . findFirst ( ) ; return found . orElse ( null ) ; |
public class SessionContext {
    /**
     * Called by the webcontainer to do session cleanup, once per webapp per request.
     * If multiple apps are involved due to forwards/includes, multiple
     * sessionPostInvoke calls are all made at the end of the request; it is NOT
     * called immediately when a dispatch ends. Therefore some of this processing -
     * such as calling unlockSession or clearing the crossover thread-local - may
     * be done multiple times. This is ok.
     *
     * @param sess the session being released for this request; may be null
     */
    public void sessionPostInvoke(HttpSession sess) {
        if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_CORE.isLoggable(Level.FINE)) {
            LoggingUtil.SESSION_LOGGER_CORE.entering(methodClassName, methodNames[SESSION_POST_INVOKE]);
        }
        SessionData s = (SessionData) sess;
        if (_smc.getAllowSerializedSessionAccess()) {
            // serialized-access mode holds a per-session lock for the request; release it now
            unlockSession(sess);
        }
        if (s != null) {
            synchronized (s) {
                SessionAffinityContext sac = null;
                _coreHttpSessionManager.releaseSession(s.getISession(), sac);
                if (_coreHttpAppSessionManager != null) {
                    // try and get the Application Session in memory... if it is there,
                    // make sure you update the backend via releaseSession
                    ISession iSess = (ISession) _coreHttpAppSessionManager.getIStore().getFromMemory(s.getId());
                    if (iSess != null) {
                        // iSess.decrementRefCount();
                        _coreHttpAppSessionManager.releaseSession(iSess, sac);
                    }
                }
            }
        }
        if (_smc.isDebugSessionCrossover()) {
            // clear the crossover-detection thread-local for this request thread
            currentThreadSacHashtable.set(null);
        }
        if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_CORE.isLoggable(Level.FINE)) {
            LoggingUtil.SESSION_LOGGER_CORE.exiting(methodClassName, methodNames[SESSION_POST_INVOKE]);
        }
    }
}
public class JsApiHdrsImpl {
    /**
     * Gets the contents of the CorrelationId field from the message header.
     * A String value is returned as-is; a binary value is rendered as the ID
     * prefix followed by its hex representation; an empty field yields null.
     * Javadoc description supplied by JsApiMessage interface.
     *
     * @return the correlation id, or null when the field is empty
     */
    public final String getCorrelationId() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "getCorrelationId");
        String value = null;
        int choice = getApi().getChoiceField(JsApiAccess.CORRELATIONID);
        /* If it is stored as a String, just extract it */
        if (choice == JsApiAccess.IS_CORRELATIONID_STRINGVALUE) {
            value = (String) getApi().getField(JsApiAccess.CORRELATIONID_STRINGVALUE);
        }
        /* If it is stored as a binary, we have to convert it */
        else if (choice == JsApiAccess.IS_CORRELATIONID_BINARYVALUE) {
            byte[] binValue = (byte[]) getApi().getField(JsApiAccess.CORRELATIONID_BINARYVALUE);
            if (binValue != null) {
                /* It'll be more economical to get the length right immediately */
                StringBuffer sbuf = new StringBuffer((binValue.length * 2) + 3);
                /* Insert the ID: then add on the binary value as a hex string */
                sbuf.append(ID_STRING);
                HexString.binToHex(binValue, 0, binValue.length, sbuf);
                /* Return the String representation */
                value = sbuf.toString();
            }
        }
        /* If the choice was 'Empty' do nothing as value is already null. */
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "getCorrelationId", value);
        return value;
    }
}
public class ProductSearchResultUrl { /** * Get Resource Url for Suggest
* @ param groups Specifies the group that you want this operation to return in the response . This parameter accepts one or more values , separated by comma . For example , if you set this parameter to , then this operation returns a object that contains suggestions for products that match the user entered characters in the search field . The valid values for this parameter are the following :
* â € ” Indicates that products should be matched against and returned in the response . The search value is compared against product name and code . The response contains a with a name of and a collection of . Each collection item has a of Product and contains a equal to a complete product object .
* â € ” Indicates that categories should be matched against and returned in the response . The search value is compared to category name . The response contains a with a name of and a collection of . Each collection item has a of Category and contains a equal to a complete category object .
* â € ” Indicates that previously used search terms ( keywords ) should be matched against and returned in the response , sorted by frequency of use . Keep in mind that it is not currently possible to edit or remove search terms that may be considered undesirable via the API . The response contains a with a name of and a collection of . Each collection item has a of Term and contains a equal to a string value of the matched search term .
* The default value is ; however , the Core Theme only integrates and ignores the group . This operation only returns data that is then made available to your theme . If you set this paramter to multiple values , returns multiple in the response . Depending on your requirements , you can then customize your theme to display the groups together or as separate lists in the displayed search suggestions .
* @ param pageSize When creating paged results from a query , this value indicates the zero - based offset in the complete result set where the returned entities begin . For example , with this parameter set to 25 , to get the 51st through the 75th items , set startIndex to 50.
* @ param query Properties for the product location inventory provided for queries to locate products by their location .
* @ param responseFields Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object . This parameter should only be used to retrieve data . Attempting to update data using this parameter may cause data loss .
* @ return String Resource Url */
public static MozuUrl suggestUrl ( String groups , Integer pageSize , String query , String responseFields ) { } } | UrlFormatter formatter = new UrlFormatter ( "/api/commerce/catalog/storefront/productsearch/suggest?query={query}&groups={groups}&pageSize={pageSize}&responseFields={responseFields}" ) ; formatter . formatUrl ( "groups" , groups ) ; formatter . formatUrl ( "pageSize" , pageSize ) ; formatter . formatUrl ( "query" , query ) ; formatter . formatUrl ( "responseFields" , responseFields ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ; |
public class BufferUtils { /** * Create a new ByteBuffer using the provided byte array .
* @ param array the byte array to use .
* @ param offset the offset within the byte array to use from
* @ param length the length in bytes of the array to use
* @ return ByteBuffer with provided byte array , in flush mode */
public static ByteBuffer toBuffer ( byte array [ ] , int offset , int length ) { } } | if ( array == null ) return EMPTY_BUFFER ; return ByteBuffer . wrap ( array , offset , length ) ; |
public class Font { /** * Return a java . awt . Font style constant from FontStyle .
* @ param style
* FontStyle value .
* @ return A java . awt . Font style constant . */
private static int fontStyleToInt ( FontStyle style ) { } } | switch ( style ) { case PLAIN : return java . awt . Font . PLAIN ; case BOLD : return java . awt . Font . BOLD ; case ITALIC : return java . awt . Font . ITALIC ; default : return java . awt . Font . PLAIN ; } |
public class DatanodeDescriptor {
    /**
     * Removes up to {@code maxblocks} blocks from this datanode's invalidate set
     * and wraps them in a DNA_INVALIDATE command.
     *
     * @param maxblocks maximum number of blocks to include in the command
     * @return the invalidate command, or null when there is nothing to invalidate
     */
    BlockCommand getInvalidateBlocks(int maxblocks) {
        Block[] deleteList = null;
        // drain under the set's own lock: the invalidate set is shared with other threads
        synchronized (invalidateBlocks) {
            deleteList = invalidateBlocks.pollToArray(new Block[Math.min(invalidateBlocks.size(), maxblocks)]);
        }
        return (deleteList == null || deleteList.length == 0) ? null : new BlockCommand(DatanodeProtocol.DNA_INVALIDATE, deleteList);
    }
}
public class ResolutionUtils {
    /**
     * Resolves the device name (optionally wrapped as "@name" or "[@name]") to the
     * host addresses of the matching network interface, recursing into matching
     * sub-interfaces. IPv6 addresses are included, bracketed, only when allowed.
     *
     * @param deviceName interface name, optionally prefixed "@" or wrapped "[@...]"
     * @param networkInterfaces the interfaces to scan
     * @param allowIPv6 whether IPv6 addresses should be included
     * @return all resolved host addresses; empty when no interface matches
     */
    private static List<String> resolveDeviceAddress(String deviceName, Enumeration<NetworkInterface> networkInterfaces, boolean allowIPv6) {
        List<String> resolvedAddresses = new ArrayList<>();
        // strip the "[@name]" / "@name" wrappers down to the bare interface name
        if (deviceName.startsWith("[@") && deviceName.endsWith("]")) {
            deviceName = deviceName.substring(2, deviceName.lastIndexOf(']'));
        } else if (deviceName.startsWith("@")) {
            deviceName = deviceName.substring(1);
        }
        while (networkInterfaces.hasMoreElements()) {
            NetworkInterface networkInterface = networkInterfaces.nextElement();
            // FIX: equalsIgnoreCase avoids the locale-sensitive toLowerCase() comparison
            // (e.g. Turkish dotless-i) and a potential NPE when getDisplayName() is null
            if (deviceName.equalsIgnoreCase(networkInterface.getDisplayName())) {
                Enumeration<InetAddress> inetAddresses = networkInterface.getInetAddresses();
                while (inetAddresses.hasMoreElements()) {
                    InetAddress inetAddress = inetAddresses.nextElement();
                    if (inetAddress instanceof Inet6Address) {
                        if (!allowIPv6) {
                            continue;
                        }
                        // IPv6 literals must be bracketed for use in URIs
                        String inet6HostAddress = inetAddress.getHostAddress();
                        resolvedAddresses.add(String.format("[%s]", inet6HostAddress));
                    } else {
                        resolvedAddresses.add(inetAddress.getHostAddress());
                    }
                }
            }
            // add an internal URI for any sub interfaces that match the hostAddress
            Enumeration<NetworkInterface> subInterfaces = networkInterface.getSubInterfaces();
            if (subInterfaces.hasMoreElements()) {
                resolvedAddresses.addAll(resolveDeviceAddress(deviceName, subInterfaces, allowIPv6));
            }
        }
        return resolvedAddresses;
    }
}
public class MergeOperation {
    /**
     * Sets the ringbuffer data given by {@code fromMergeData} into
     * {@code toContainer}: head/tail sequences are copied and every item in
     * between is written. When the container's store is enabled, the serialized
     * items are also persisted in a single storeAll batch.
     *
     * @param fromMergeData the data which needs to be set into the container
     * @param toContainer the target ringbuffer container
     */
    private void setRingbufferData(RingbufferMergeData fromMergeData, RingbufferContainer<Object, Object> toContainer) {
        boolean storeEnabled = toContainer.getStore().isEnabled();
        // pre-size the serialized batch only when it will actually be stored
        Data[] storeItems = storeEnabled ? new Data[fromMergeData.size()] : null;
        toContainer.setHeadSequence(fromMergeData.getHeadSequence());
        toContainer.setTailSequence(fromMergeData.getTailSequence());
        for (long seq = fromMergeData.getHeadSequence(); seq <= fromMergeData.getTailSequence(); seq++) {
            final Object resultValue = fromMergeData.read(seq);
            toContainer.set(seq, resultValue);
            if (storeEnabled) {
                // store index is relative to the head sequence
                storeItems[(int) (seq - fromMergeData.getHeadSequence())] = serializationService.toData(resultValue);
            }
        }
        if (storeEnabled) {
            toContainer.getStore().storeAll(fromMergeData.getHeadSequence(), storeItems);
        }
    }
}
public class SourceBuilderImpl { /** * { @ inheritDoc } */
@ Override public SubmitterLink createSubmitterLink ( final GedObject ged , final Submitter submitter ) { } } | if ( ged == null || submitter == null ) { return new SubmitterLink ( ) ; } final SubmitterLink submitterLink = new SubmitterLink ( ged , "Submitter" , new ObjectId ( submitter . getString ( ) ) ) ; ged . insert ( submitterLink ) ; return submitterLink ; |
public class ClientAppBase { /** * Create a Timer task to display performance data on the Vote procedure
* It calls printStatistics ( ) every displayInterval seconds */
protected void schedulePeriodicStats ( ) { } } | m_timer = new Timer ( ) ; TimerTask statsPrinting = new TimerTask ( ) { @ Override public void run ( ) { printStatistics ( ) ; } } ; m_timer . scheduleAtFixedRate ( statsPrinting , m_config . displayinterval * 1000 , m_config . displayinterval * 1000 ) ; |
public class AnnotationUtils { /** * < p > Checks if the specified type is permitted as an annotation member . < / p >
* < p > The Java language specification only permits certain types to be used
* in annotations . These include { @ link String } , { @ link Class } , primitive
* types , { @ link Annotation } , { @ link Enum } , and single - dimensional arrays of
* these types . < / p >
* @ param type the type to check , { @ code null }
* @ return { @ code true } if the type is a valid type to use in an annotation */
@ GwtIncompatible ( "incompatible method" ) public static boolean isValidAnnotationMemberType ( Class < ? > type ) { } } | if ( type == null ) { return false ; } if ( type . isArray ( ) ) { type = type . getComponentType ( ) ; } return type . isPrimitive ( ) || type . isEnum ( ) || type . isAnnotation ( ) || String . class . equals ( type ) || Class . class . equals ( type ) ; |
public class Ebes {
    /**
     * Interpolates the required height Y (in mm) of an I-shaped single section
     * from the required cross-sectional area A (in cm2), using a cubic
     * regression fit. The fit is only valid strictly inside (5, 1000) cm2;
     * outside that interval the method returns 0.
     *
     * (The previous Javadoc was ~70 lines of commented-out EbesMutation code;
     * removed as dead code.)
     *
     * @param A required cross-sectional area in cm2
     * @return interpolated section height in mm, or 0 when A is outside (5, 1000)
     */
    public double Interpolation_I_Single_Y_func_Area_(double A) {
        // A (cm2) is the area needed to cover the stress;
        // Y (mm) is the height related to the y axis
        double Y = 0;
        // the interpolation is limited to the fitted interval
        if (5.0 < A && A < 1000.0) {
            Y = 0.000003 * Math.pow(A, 3) - 0.0063 * Math.pow(A, 2) + 4.1118 * A + 75.414;
        }
        return Y;
    }
}
public class ExpectSteps { /** * Expects that the target page is completely loaded .
* @ return true or false */
public static ExpectedCondition < Boolean > waitForLoad ( ) { } } | return new ExpectedCondition < Boolean > ( ) { @ Override public Boolean apply ( WebDriver driver ) { return ( ( JavascriptExecutor ) driver ) . executeScript ( "return document.readyState" ) . equals ( "complete" ) ; } } ; |
public class JrsValueCursor { /** * Method called to create a new context for iterating all
* contents of the current structured value ( JSON array or object ) */
public final JrsValueCursor iterateChildren ( ) { } } | JrsValue n = currentNode ( ) ; if ( n == null ) throw new IllegalStateException ( "No current node" ) ; if ( n . isArray ( ) ) { // false since we have already returned START _ ARRAY
return new ArrayCursor ( ( JrsArray ) n , this ) ; } if ( n . isObject ( ) ) { return new ObjectCursor ( ( JrsObject ) n , this ) ; } throw new IllegalStateException ( "Current node of type " + n . getClass ( ) . getName ( ) ) ; |
public class AzureFirewallsInner {
    /**
     * Deletes the specified Azure Firewall. Blocks until the initial
     * (begin-delete) service call completes.
     *
     * @param resourceGroupName The name of the resource group.
     * @param azureFirewallName The name of the Azure Firewall.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void beginDelete(String resourceGroupName, String azureFirewallName) {
        // block on the async begin-delete call; body() forces response evaluation
        beginDeleteWithServiceResponseAsync(resourceGroupName, azureFirewallName).toBlocking().single().body();
    }
}
public class NavigationView { /** * Should be called after { @ link NavigationView # onCreate ( Bundle ) } .
* This method adds the { @ link OnNavigationReadyCallback } ,
* which will fire the ready events for this view .
* @ param onNavigationReadyCallback to be set to this view */
public void initialize ( OnNavigationReadyCallback onNavigationReadyCallback ) { } } | this . onNavigationReadyCallback = onNavigationReadyCallback ; if ( ! isMapInitialized ) { mapView . getMapAsync ( this ) ; } else { onNavigationReadyCallback . onNavigationReady ( navigationViewModel . isRunning ( ) ) ; } |
public class RequestParam { /** * Returns a request parameter as integer .
* @ param request Request .
* @ param param Parameter name .
* @ return Parameter value or 0 if it does not exist or is not a number . */
public static int getInt ( @ NotNull ServletRequest request , @ NotNull String param ) { } } | return getInt ( request , param , 0 ) ; |
public class Resty { /** * POST to the URI and get the resource as binary resource .
* @ param uri
* the uri to follow
* @ return
* @ throws IOException */
public BinaryResource bytes ( URI anUri , AbstractContent someContent ) throws IOException { } } | return doPOSTOrPUT ( anUri , someContent , createBinaryResource ( ) ) ; |
public class ToolsScopeComponent { /** * Save all configured settings */
public void saveSettings ( ) { } } | mCallbacks . saveExtensionSetting ( SETTING_PROXY , setSetting ( jCheckBoxProxy . isSelected ( ) ) ) ; mCallbacks . saveExtensionSetting ( SETTING_REPEATER , setSetting ( jCheckBoxRepeater . isSelected ( ) ) ) ; mCallbacks . saveExtensionSetting ( SETTING_SCANNER , setSetting ( jCheckBoxScanner . isSelected ( ) ) ) ; mCallbacks . saveExtensionSetting ( SETTING_INTRUDER , setSetting ( jCheckBoxIntruder . isSelected ( ) ) ) ; mCallbacks . saveExtensionSetting ( SETTING_SEQUENCER , setSetting ( jCheckBoxSequencer . isSelected ( ) ) ) ; mCallbacks . saveExtensionSetting ( SETTING_SPIDER , setSetting ( jCheckBoxSpider . isSelected ( ) ) ) ; mCallbacks . saveExtensionSetting ( SETTING_EXTENDER , setSetting ( jCheckBoxExtender . isSelected ( ) ) ) ; |
public class Mongo { /** * Get default mongodb database reference or initiate it if not initialized .
* @ return the default mongodb database reference */
public static MongoDatabase getOrInitDefaultDatabase ( ) { } } | String connectionString = XianConfig . get ( "mongodb_connection_string" ) ; String database = XianConfig . get ( "mongodb_database" ) ; return getOrInitDefaultDatabase ( connectionString , database ) ; |
public class NamespaceChangeStreamListener { /** * Returns the latest change events , and clears them from the change stream listener .
* @ return the latest change events . */
@ SuppressWarnings ( "unchecked" ) public Map < BsonValue , ChangeEvent < BsonDocument > > getEvents ( ) { } } | nsLock . readLock ( ) . lock ( ) ; final Map < BsonValue , ChangeEvent < BsonDocument > > events ; try { events = new HashMap < > ( this . events ) ; } finally { nsLock . readLock ( ) . unlock ( ) ; } nsLock . writeLock ( ) . lock ( ) ; try { this . events . clear ( ) ; return events ; } finally { nsLock . writeLock ( ) . unlock ( ) ; } |
public class FullscreenVideoView { /** * TextureView */
@ TargetApi ( Build . VERSION_CODES . ICE_CREAM_SANDWICH ) @ Override public void onSurfaceTextureAvailable ( SurfaceTexture surfaceTexture , int width , int height ) { } } | Log . d ( TAG , "onSurfaceTextureAvailable - state: " + this . currentState ) ; Surface surface = new Surface ( surfaceTexture ) ; if ( this . mediaPlayer != null ) { this . mediaPlayer . setSurface ( surface ) ; // If is not prepared yet - tryToPrepare ( )
if ( ! this . surfaceIsReady ) { this . surfaceIsReady = true ; if ( this . currentState == State . INITIALIZED || this . currentState == State . PREPARING ) tryToPrepare ( ) ; } } |
public class SMailConventionReceptionist {
    /**
     * Verifies the body-meta format of a mail template: the meta section must
     * be terminated by an independent delimiter line, start with a header
     * comment containing a title and description, and every line after the
     * subject must begin with a known prefix.
     *
     * @param bodyFile path of the body file (used in error messages)
     * @param plainText full text of the template being verified
     * @param delimiter the meta/body delimiter string
     */
    protected void verifyFormat(String bodyFile, String plainText, String delimiter) {
        final String meta = Srl.substringFirstFront(plainText, delimiter);
        if (!meta.endsWith(LF)) { // also CRLF checked
            throwBodyMetaNoIndependentDelimiterException(bodyFile, plainText);
        }
        final int rearIndex = plainText.indexOf(delimiter) + delimiter.length();
        if (plainText.length() > rearIndex) { // just in case (empty mail possible?)
            // the character directly after the delimiter must be a line break
            final String rearFirstStr = plainText.substring(rearIndex, rearIndex + 1);
            if (!Srl.equalsPlain(rearFirstStr, LF, CR)) { // e.g. >>> Hello, ...
                throwBodyMetaNoIndependentDelimiterException(bodyFile, plainText);
            }
        }
        if (!meta.startsWith(COMMENT_BEGIN)) { // also leading spaces not allowed
            throwBodyMetaNotStartWithHeaderCommentException(bodyFile, plainText, meta);
        }
        if (!meta.contains(COMMENT_END)) {
            throwBodyMetaHeaderCommentEndMarkNotFoundException(bodyFile, plainText, meta);
        }
        // the header comment must contain a title scope followed by a description
        final String headerComment = Srl.extractScopeFirst(plainText, COMMENT_BEGIN, COMMENT_END).getContent();
        final ScopeInfo titleScope = Srl.extractScopeFirst(headerComment, TITLE_BEGIN, TITLE_END);
        if (titleScope == null) {
            throwBodyMetaTitleCommentNotFoundException(bodyFile, plainText);
        }
        final String desc = Srl.substringFirstRear(headerComment, TITLE_END);
        if (desc.isEmpty()) {
            throwBodyMetaDescriptionCommentNotFoundException(bodyFile, plainText);
        }
        final String rearMeta = Srl.substringFirstRear(meta, COMMENT_END); // no way because of already checked
        // if (!rearMeta.contains(LF)) {
        final List<String> splitList = Srl.splitList(rearMeta, LF);
        if (!splitList.get(0).trim().isEmpty()) { // after '*/'
            throwBodyMetaHeaderCommentEndMarkNoIndependentException(bodyFile, plainText);
        }
        if (!splitList.get(1).startsWith(SUBJECT_LABEL)) { // also leading spaces not allowed
            throwBodyMetaSubjectNotFoundException(bodyFile, plainText);
        }
        final int nextIndex = 2;
        if (splitList.size() > nextIndex) { // after subject
            final List<String> nextList = splitList.subList(nextIndex, splitList.size());
            final int nextSize = nextList.size();
            int index = 0;
            for (String line : nextList) {
                if (index == nextSize - 1) { // last loop
                    if (line.isEmpty()) { // empty line only allowed in last loop
                        break;
                    }
                }
                // every remaining meta line must start with a known prefix
                if (!allowedPrefixList.stream().anyMatch(prefix -> line.startsWith(prefix))) {
                    throwBodyMetaUnknownLineException(bodyFile, plainText, line);
                }
                if (line.startsWith(OPTION_LABEL)) {
                    // option line: every dot-separated option must be known
                    final String options = Srl.substringFirstRear(line, OPTION_LABEL);
                    final List<String> optionList = Srl.splitListTrimmed(options, ".");
                    for (String option : optionList) {
                        if (!optionSet.contains(option)) {
                            throwBodyMetaUnknownOptionException(bodyFile, plainText, option);
                        }
                    }
                }
                ++index;
            }
        }
    }
}
public class LazyIterate { /** * Creates a deferred tap iterable for the specified iterable .
* @ since 6.0 */
public static < T > LazyIterable < T > tap ( Iterable < T > iterable , Procedure < ? super T > procedure ) { } } | return new TapIterable < T > ( iterable , procedure ) ; |
public class GrpcServiceBuilder { /** * Adds gRPC { @ link BindableService } s to this { @ link GrpcServiceBuilder } . Most gRPC service
* implementations are { @ link BindableService } s . */
public GrpcServiceBuilder addServices ( BindableService ... bindableServices ) { } } | requireNonNull ( bindableServices , "bindableServices" ) ; return addServices ( ImmutableList . copyOf ( bindableServices ) ) ; |
public class IonReaderTextSystemX { /** * into a base class ( the * Value ( ) methods also share a lot of similarity ) . */
public IntegerSize getIntegerSize ( ) { } } | load_once ( ) ; if ( _value_type != IonType . INT || _v . isNull ( ) ) { return null ; } return _Private_ScalarConversions . getIntegerSize ( _v . getAuthoritativeType ( ) ) ; |
public class FailoverStrategyLoader { /** * Loads a FailoverStrategy Factory from the given configuration . */
public static FailoverStrategy . Factory loadFailoverStrategy ( Configuration config , @ Nullable Logger logger ) { } } | final String strategyParam = config . getString ( JobManagerOptions . EXECUTION_FAILOVER_STRATEGY ) ; if ( StringUtils . isNullOrWhitespaceOnly ( strategyParam ) ) { if ( logger != null ) { logger . warn ( "Null config value for {} ; using default failover strategy (full restarts)." , JobManagerOptions . EXECUTION_FAILOVER_STRATEGY . key ( ) ) ; } return new RestartAllStrategy . Factory ( ) ; } else { switch ( strategyParam . toLowerCase ( ) ) { case FULL_RESTART_STRATEGY_NAME : return new RestartAllStrategy . Factory ( ) ; case PIPELINED_REGION_RESTART_STRATEGY_NAME : return new RestartPipelinedRegionStrategy . Factory ( ) ; case INDIVIDUAL_RESTART_STRATEGY_NAME : return new RestartIndividualStrategy . Factory ( ) ; default : // we could interpret the parameter as a factory class name and instantiate that
// for now we simply do not support this
throw new IllegalConfigurationException ( "Unknown failover strategy: " + strategyParam ) ; } } |
public class Optimizer { /** * Remove match any pattern at the end , e . g . , ( { @ code " foo . * $ " = > " foo " } ) . */
static Matcher removeTrailingMatchAny ( Matcher matcher ) { } } | if ( matcher instanceof ZeroOrMoreMatcher ) { ZeroOrMoreMatcher zm = matcher . as ( ) ; boolean atEnd = zm . next ( ) instanceof TrueMatcher || zm . next ( ) instanceof EndMatcher ; if ( atEnd && zm . repeated ( ) instanceof AnyMatcher ) { return TrueMatcher . INSTANCE ; } } return matcher ; |
public class JCudaDriver {
    /**
     * Queries attributes of the link between two devices.<br>
     * <br>
     * Returns in *value the value of the requested attribute attrib of the
     * link between srcDevice and dstDevice. The supported attributes are:
     * <ul>
     *   <li>CU_DEVICE_P2P_ATTRIBUTE_PERFORMANCE_RANK: A relative value
     *       indicating the performance of the link between the two devices.</li>
     *   <li>CU_DEVICE_P2P_ATTRIBUTE_ACCESS_SUPPORTED: 1 if P2P access is
     *       enabled.</li>
     *   <li>CU_DEVICE_P2P_ATTRIBUTE_NATIVE_ATOMIC_SUPPORTED: 1 if atomic
     *       operations over the link are supported.</li>
     * </ul>
     * Returns CUDA_ERROR_INVALID_DEVICE if srcDevice or dstDevice are not
     * valid or if they represent the same device.<br>
     * Returns CUDA_ERROR_INVALID_VALUE if attrib is not valid or if value is
     * a null pointer.<br>
     *
     * @param value Returned value of the requested attribute
     * @param attrib The requested attribute of the link between srcDevice and dstDevice
     * @param srcDevice The source device of the target link
     * @param dstDevice The destination device of the target link
     * @return CUDA_SUCCESS, CUDA_ERROR_DEINITIALIZED, CUDA_ERROR_NOT_INITIALIZED,
     *         CUDA_ERROR_INVALID_DEVICE, CUDA_ERROR_INVALID_VALUE
     * @see JCudaDriver#cuCtxEnablePeerAccess
     * @see JCudaDriver#cuCtxDisablePeerAccess
     * @see JCudaDriver#cuCtxCanAccessPeer
     */
    public static int cuDeviceGetP2PAttribute(int value[], int attrib, CUdevice srcDevice, CUdevice dstDevice) {
        // Thin JNI wrapper; checkResult maps non-success codes to exceptions
        // when exception checking is enabled.
        return checkResult(cuDeviceGetP2PAttributeNative(value, attrib, srcDevice, dstDevice));
    }
}
public class ChatController {
    /**
     * Saves and sends a message with attachments. The pipeline is:
     * create processor -> check SDK state -> persist temporary message ->
     * upload attachments -> update temporary message with upload details ->
     * send -> reconcile the stored message with the send result.
     *
     * @param conversationId Unique conversation id.
     * @param message Message to send.
     * @param attachments List of attachments to send with a message.
     * @return Observable with Chat SDK result.
     */
    Observable<ChatResult> sendMessageWithAttachments(@NonNull final String conversationId, @NonNull final MessageToSend message, @Nullable final List<Attachment> attachments) {
        final MessageProcessor messageProcessor = attCon.createMessageProcessor(message, attachments, conversationId, getProfileId());
        return checkState().flatMap(client -> {
            // converts too-large message parts to attachments, adds temp upload parts for all attachments
            messageProcessor.preparePreUpload();
            return upsertTempMessage(messageProcessor.createTempMessage()) // create temporary message
                    .flatMap(isOk -> attCon.uploadAttachments(messageProcessor.getAttachments(), client)) // upload attachments
                    .flatMap(uploaded -> {
                        if (uploaded != null && !uploaded.isEmpty()) {
                            // remove temp upload parts, add parts with upload data
                            messageProcessor.preparePostUpload(uploaded);
                            // update message with attachment details like url
                            return upsertTempMessage(messageProcessor.createTempMessage());
                        } else {
                            // nothing uploaded: continue the chain with a success marker
                            return Observable.fromCallable(() -> true);
                        }
                    })
                    // send message with attachment details as additional message parts
                    .flatMap(isOk -> client.service().messaging().sendMessage(conversationId, messageProcessor.prepareMessageToSend())
                            // update temporary message with the new message id from the response
                            .flatMap(result -> result.isSuccessful() ? updateStoreWithSentMsg(messageProcessor, result) : handleMessageError(messageProcessor, new ComapiException(result.getErrorBody())))
                            // if an error occurred, update the message status list with an error status
                            .onErrorResumeNext(t -> handleMessageError(messageProcessor, t)));
        });
    }
}
public class MSDOSHeader { /** * { @ inheritDoc } */
@ Override public String getInfo ( ) { } } | if ( headerData == null ) { return "No MS DOS Header found!" + NL ; } else { StringBuilder b = new StringBuilder ( "-------------" + NL + "MS DOS Header" + NL + "-------------" + NL ) ; for ( StandardField entry : headerData . values ( ) ) { b . append ( entry . getDescription ( ) + ": " + entry . getValue ( ) + " (0x" + Long . toHexString ( entry . getValue ( ) ) + ")" + NL ) ; } return b . toString ( ) ; } |
public class URL { /** * Returns a new URL representing its " parent " component .
* @ return a new URL representing its " parent " component */
public URL getParent ( ) { } } | if ( isField ( ) ) { return getCharacteristicURL ( ) ; } else if ( isCharacteristic ( ) ) { return getServiceURL ( ) ; } else if ( isService ( ) ) { return getDeviceURL ( ) ; } else if ( isDevice ( ) ) { return getAdapterURL ( ) ; } else if ( isAdapter ( ) ) { return getProtocolURL ( ) ; } else { return null ; } |
public class HttpOutputStreamImpl {
    /**
     * Closes this output stream: flushes any buffered data and releases the
     * buffers. Idempotent - a second call is a no-op.
     *
     * @throws IOException if flushing the buffered data fails
     * @see java.io.OutputStream#close()
     */
    @Override
    public void close() throws IOException {
        if (isClosed()) {
            return;
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "Closing stream: hc: " + this.hashCode() + " details: " + this);
        }
        // Mark closed before flushing so re-entrant calls see the closed state.
        this.closed = true;
        this.ignoreFlush = false;
        try {
            flushBuffers();
        } finally {
            // must release the buffers even if the flush fails
            clear();
        }
    }
}
public class IndexedCollectionCertStore { /** * Iterate through all the X509Certificates and add matches to the
* collection . */
private void matchX509Certs ( CertSelector selector , Collection < Certificate > matches ) { } } | for ( Object obj : certSubjects . values ( ) ) { if ( obj instanceof X509Certificate ) { X509Certificate cert = ( X509Certificate ) obj ; if ( selector . match ( cert ) ) { matches . add ( cert ) ; } } else { // See certSubjects javadoc .
@ SuppressWarnings ( "unchecked" ) List < X509Certificate > list = ( List < X509Certificate > ) obj ; for ( X509Certificate cert : list ) { if ( selector . match ( cert ) ) { matches . add ( cert ) ; } } } } |
public class IllegalGuardedBy { /** * Throws an { @ link IllegalGuardedBy } exception if the given condition is false . */
public static void checkGuardedBy ( boolean condition , String formatString , Object ... formatArgs ) { } } | if ( ! condition ) { throw new IllegalGuardedBy ( String . format ( formatString , formatArgs ) ) ; } |
public class ClassFileUtils { /** * Delegates proxy creation via { @ link ProxyServices } to the integrator . */
public static Class < ? > toClass ( ClassFile ct , Class < ? > originalClass , ProxyServices proxyServices , ProtectionDomain domain ) { } } | try { byte [ ] bytecode = ct . toBytecode ( ) ; Class < ? > result ; if ( domain == null ) { result = proxyServices . defineClass ( originalClass , ct . getName ( ) , bytecode , 0 , bytecode . length ) ; } else { result = proxyServices . defineClass ( originalClass , ct . getName ( ) , bytecode , 0 , bytecode . length , domain ) ; } return result ; } catch ( RuntimeException e ) { throw e ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } |
public class Types { /** * Test if a requested type is identity equal with one from a given types list . If < code > type < / code > is null return
* false . If a type to match happened to be null is considered no match .
* @ param t type to search for , possible null ,
* @ param typesToMatch types list to compare with .
* @ return true if requested type is one from given types list .
* @ throws IllegalArgumentException if < code > typesToMach < / code > is empty . */
public static boolean equalsAny ( Type t , Type ... typesToMatch ) throws IllegalArgumentException { } } | Params . notNullOrEmpty ( typesToMatch , "Types to match" ) ; if ( t == null ) { return false ; } for ( Type typeToMatch : typesToMatch ) { if ( t . equals ( typeToMatch ) ) { return true ; } } return false ; |
public class Date {
    /**
     * Setter for month - sets month of the year.
     * <p>
     * NOTE(review): JCas-generated accessor (UIMA) - do not hand-edit; the
     * feature check and low-level CAS write follow the generator's template.
     *
     * @generated
     * @param v value to set into the feature
     */
    public void setMonth(int v) {
        // Guard: fail fast with a framework error if the "month" feature is
        // missing from the type system (only when feature checking is on).
        if (Date_Type.featOkTst && ((Date_Type) jcasType).casFeat_month == null)
            jcasType.jcas.throwFeatMissing("month", "de.julielab.jules.types.Date");
        jcasType.ll_cas.ll_setIntValue(addr, ((Date_Type) jcasType).casFeatCode_month, v);
    }
}
public class Bitstream { /** * Load ID3v2 frames .
* @ param in MP3 InputStream .
* @ author JavaZOOM */
private void loadID3v2 ( InputStream in ) { } } | int size = - 1 ; try { // Read ID3v2 header ( 10 bytes ) .
in . mark ( 10 ) ; size = readID3v2Header ( in ) ; header_pos = size ; } catch ( IOException e ) { } finally { try { // Unread ID3v2 header ( 10 bytes ) .
in . reset ( ) ; } catch ( IOException e ) { } } // Load ID3v2 tags .
try { if ( size > 0 ) { rawid3v2 = new byte [ size ] ; in . read ( rawid3v2 , 0 , rawid3v2 . length ) ; } } catch ( IOException e ) { } |
public class DoubleBond3DParity {
    /**
     * Calculates the configuration of the double bond as a parity from the 3D
     * coordinates of the four atoms (u=v double bond with substituents x on u
     * and w on v - fields declared on this class).
     *
     * @return opposite (+1), together (-1)
     */
    @Override
    public int parity() {
        // create three vectors, v->u, v->w and u->x
        double[] vu = toVector(v, u);
        double[] vw = toVector(v, w);
        double[] ux = toVector(u, x);
        // normal vector (to compare against); the normal (n) lies in the bond
        // plane, perpendicular to vu:
        //   x   n   w
        //     u = v
        double[] normal = crossProduct(vu, crossProduct(vu, vw));
        // compare the dot products of v->w and u->x: if the signs are the same
        // they both point to the same side of the bond. A value close to 0
        // would mean pi/2 radians (i.e. unspecified), but 3D coordinates are
        // generally discrete and do not normally represent unspecified stereo
        // configurations, so we don't check for that.
        int parity = (int) Math.signum(dot(normal, vw)) * (int) Math.signum(dot(normal, ux));
        // invert sign so the result matches Sp2 double bond parity
        return parity * -1;
    }
}
public class SFSUtilities { /** * Use this only if DataSource is not available .
* In order to be able to use { @ link ResultSet # unwrap ( Class ) } and
* { @ link java . sql . ResultSetMetaData # unwrap ( Class ) } to get { @ link SpatialResultSet } and
* { @ link SpatialResultSetMetaData } this method wrap the provided connection .
* @ param connection H2 or PostGIS Connection
* @ return Wrapped DataSource , with spatial methods */
public static Connection wrapConnection ( Connection connection ) { } } | try { if ( connection . isWrapperFor ( ConnectionWrapper . class ) ) { return connection ; } else { return new ConnectionWrapper ( connection ) ; } } catch ( SQLException ex ) { return new ConnectionWrapper ( connection ) ; } |
public class OIDCClientAuthenticatorUtil { /** * todo : avoid call on each request . */
public String setRedirectUrlIfNotDefined ( HttpServletRequest req , ConvergedClientConfig clientConfig ) { } } | // String redirect _ url = clientConfig . getRedirectUrlFromServerToClient ( ) ;
String redirect_url = null ; // in oidc case , configimpl completely builds this url , in social we need to finish building it .
if ( clientConfig . isSocial ( ) ) { redirect_url = getRedirectUrlFromServerToClient ( clientConfig . getId ( ) , clientConfig . getContextPath ( ) , clientConfig . getRedirectUrlFromServerToClient ( ) ) ; } else { redirect_url = clientConfig . getRedirectUrlFromServerToClient ( ) ; } // in oidc and social case , null unless redirectToRPHostAndPort specified .
if ( redirect_url == null || redirect_url . isEmpty ( ) ) { String uri = clientConfig . getContextPath ( ) + "/redirect/" + clientConfig . getId ( ) ; redirect_url = new OidcClientUtil ( ) . getRedirectUrl ( req , uri ) ; } redirect_url = clientConfig . getRedirectUrlWithJunctionPath ( redirect_url ) ; return redirect_url ; |
public class SocketChannelWrapperBar { /** * Returns the server inet address that accepted the request . */
@ Override public InetAddress addressLocal ( ) { } } | SocketChannel s = _channel ; if ( s != null ) { try { InetSocketAddress addr = ( InetSocketAddress ) s . getLocalAddress ( ) ; return addr . getAddress ( ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; } } else { return null ; } |
public class QueueManager { /** * Challange a QueueMemberRemovedEvent .
* @ param event - the generated QueueMemberRemovedEvent . */
public void handleQueueMemberRemovedEvent ( QueueMemberRemovedEvent event ) { } } | final AsteriskQueueImpl queue = getInternalQueueByName ( event . getQueue ( ) ) ; if ( queue == null ) { logger . error ( "Ignored QueueMemberRemovedEvent for unknown queue " + event . getQueue ( ) ) ; return ; } final AsteriskQueueMemberImpl member = queue . getMember ( event . getLocation ( ) ) ; if ( member == null ) { logger . error ( "Ignored QueueMemberRemovedEvent for unknown agent name: " + event . getMemberName ( ) + " location: " + event . getLocation ( ) + " queue: " + event . getQueue ( ) ) ; return ; } queue . removeMember ( member ) ; |
public class ReflectionUtil { /** * Get the value of a given field on a given object via reflection .
* @ param object
* - - target object of field access
* @ param clazz
* - - type of argument object
* @ param fieldName
* - - name of the field
* @ return - - the value of the represented field in object ; primitive values
* are wrapped in an appropriate object before being returned */
public static Object getFieldValue ( final Object object , final Class < ? > clazz , final String fieldName ) { } } | try { final Field field = clazz . getDeclaredField ( fieldName ) ; return getFieldValue ( object , field ) ; } catch ( final Exception e ) { throw new IllegalArgumentException ( "Could not get field value: " + fieldName , e ) ; } |
public class BasicVector { /** * Parses { @ link BasicVector } from the given Matrix Market .
* @ param is the input stream in Matrix Market format
* @ return a parsed vector
* @ exception IOException if an I / O error occurs . */
public static BasicVector fromMatrixMarket ( InputStream is ) throws IOException { } } | return Vector . fromMatrixMarket ( is ) . to ( Vectors . BASIC ) ; |
public class WButton { /** * Return the button value . By default the value is the same as the text placed on the button .
* @ return the button value */
public String getValue ( ) { } } | Object value = getData ( ) ; if ( value != null ) { return value . toString ( ) ; } String text = getText ( ) ; return text == null ? NO_VALUE : text ; |
public class UndertowRequest {
    /**
     * Returns a supplier that computes the delegate's value at most once and
     * caches it (double-checked locking on an AtomicReference). Adapted from
     * Google Guava's Suppliers.
     *
     * @param delegate the supplier to memoize; must not yield {@code null}
     * @return a memoizing supplier
     */
    public static <T> Supplier<T> memoizeLock(Supplier<T> delegate) {
        final AtomicReference<T> cache = new AtomicReference<>();
        return () -> {
            // Fast path: already computed.
            final T cached = cache.get();
            if (cached != null) {
                return cached;
            }
            synchronized (cache) {
                // Re-check under the lock: another thread may have won the race.
                final T raced = cache.get();
                if (raced != null) {
                    return raced;
                }
                final T computed = Objects.requireNonNull(delegate.get());
                cache.set(computed);
                return computed;
            }
        };
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.