signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class CoverageDataPng { /** * Draw a coverage data tile and format as PNG bytes from the double array
* of " unsigned short " pixel values formatted as short [ row ] [ width ]
* @ param pixelValues
* " unsigned short " pixel values as [ row ] [ width ]
* @ return coverage data image tile bytes */
public byte [ ] drawTileData ( short [ ] [ ] pixelValues ) { } } | BufferedImage image = drawTile ( pixelValues ) ; byte [ ] bytes = getImageBytes ( image ) ; return bytes ; |
public class DefaultClientConfigImpl { /** * This is to workaround the issue that { @ link AbstractConfiguration } by default
* automatically convert comma delimited string to array */
protected static String getStringValue ( Configuration config , String key ) { } } | try { String values [ ] = config . getStringArray ( key ) ; if ( values == null ) { return null ; } if ( values . length == 0 ) { return config . getString ( key ) ; } else if ( values . length == 1 ) { return values [ 0 ] ; } StringBuilder sb = new StringBuilder ( ) ; for ( int i = 0 ; i < values . length ; i ++ ) { sb . append ( values [ i ] ) ; if ( i != values . length - 1 ) { sb . append ( "," ) ; } } return sb . toString ( ) ; } catch ( Exception e ) { Object v = config . getProperty ( key ) ; if ( v != null ) { return String . valueOf ( v ) ; } else { return null ; } } |
public class BaseRTMPClientHandler {

    /**
     * {@inheritDoc}
     *
     * Handles server-originated ping events: answers timing probes with a
     * pong, applies or remembers a client buffer size, and replies to SWF
     * verification requests. Unhandled event types are logged as warnings.
     */
    @Override
    protected void onPing(RTMPConnection conn, Channel channel, Header source, Ping ping) {
        log.trace("onPing");
        switch (ping.getEventType()) {
            case Ping.PING_CLIENT:
            case Ping.STREAM_BEGIN:
            case Ping.RECORDED_STREAM:
            case Ping.STREAM_PLAYBUFFER_CLEAR:
                // the server wants to measure the RTT
                Ping pong = new Ping();
                pong.setEventType(Ping.PONG_SERVER);
                // echo back the low 32 bits of the current time for the RTT measurement
                pong.setValue2((int) (System.currentTimeMillis() & 0xffffffff));
                conn.ping(pong);
                break;
            case Ping.STREAM_DRY:
                log.debug("Stream indicates there is no data available");
                break;
            case Ping.CLIENT_BUFFER:
                // set the client buffer
                IClientStream stream = null;
                // get the stream id
                Number streamId = ping.getValue2();
                // get requested buffer size in milliseconds
                int buffer = ping.getValue3();
                log.debug("Client sent a buffer size: {} ms for stream id: {}", buffer, streamId);
                // the client wants to set the buffer time
                stream = conn.getStreamById(streamId);
                if (stream != null) {
                    stream.setClientBufferDuration(buffer);
                    log.info("Setting client buffer on stream: {}", buffer);
                }
                // catch-all to make sure buffer size is set
                if (stream == null) {
                    // remember buffer time until stream is created
                    conn.rememberStreamBufferDuration(streamId.intValue(), buffer);
                    log.info("Remembering client buffer on stream: {}", buffer);
                }
                break;
            case Ping.PING_SWF_VERIFY:
                log.debug("SWF verification ping");
                // TODO get the swf verification bytes from the handshake
                // NOTE(review): replies with 42 zero bytes instead of real
                // verification data — confirm servers accept this placeholder
                SWFResponse swfPong = new SWFResponse(new byte[42]);
                conn.ping(swfPong);
                break;
            case Ping.BUFFER_EMPTY:
                log.debug("Buffer empty ping");
                break;
            case Ping.BUFFER_FULL:
                log.debug("Buffer full ping");
                break;
            default:
                log.warn("Unhandled ping: {}", ping);
        }
    }
}
public class EmailAlarmCallback { /** * / * This method should be used when we want to provide user auto - completion to users that have permissions for it */
public ConfigurationRequest getEnrichedRequestedConfiguration ( ) { } } | final Map < String , String > regularUsers = userService . loadAll ( ) . stream ( ) . collect ( Collectors . toMap ( User :: getName , User :: getName ) ) ; final Map < String , String > userNames = ImmutableMap . < String , String > builder ( ) . put ( graylogConfig . getRootUsername ( ) , graylogConfig . getRootUsername ( ) ) . putAll ( regularUsers ) . build ( ) ; return getConfigurationRequest ( userNames ) ; |
public class Converter { /** * convert */
private static void convertUnicodeToHex ( String str ) { } } | try { display ( "Unicode to hex: " + Utils . toHexBytes ( Utils . toBytes ( str ) ) ) ; } catch ( Exception e ) { } |
public class SimpleHTTPClient {

    /**
     * Registers a health check task with the configured health check, if any.
     *
     * @param task the health check task to register
     */
    public void registerHealthCheck(Task task) {
        // config.getHealthCheck() may return null (no health check configured);
        // Optional.ofNullable makes registration a silent no-op in that case.
        Optional.ofNullable(config.getHealthCheck()).ifPresent(healthCheck -> healthCheck.register(task));
    }
}
public class SyndataXXJobConfig {

    /**
     * Initializes the XXL-Job executor: reads {@code application.properties},
     * registers every job handler declared via an {@code xxl.job.task.*}
     * property (value = handler class name), configures the executor from the
     * {@code xxl.job.*} properties, and starts it.
     */
    public void initXxlJobExecutor() {
        PropertiesContainer propertiesContainer = new PropertiesContainer();
        propertiesContainer.addConfigPropertiesFile("application.properties");
        Map<Object, Object> objectMap = propertiesContainer.getAllProperties();
        if (objectMap != null) {
            // registry jobhandler: each "xxl.job.task.<name>=<class>" entry is
            // instantiated reflectively and registered under <name>
            Set<Map.Entry<Object, Object>> entrySet = objectMap.entrySet();
            Iterator<Map.Entry<Object, Object>> iterator = entrySet.iterator();
            while (iterator.hasNext()) {
                Map.Entry<Object, Object> entry = iterator.next();
                String name = (String) entry.getKey();
                String orineName = name;  // original key, kept for error reporting
                if (name.startsWith("xxl.job.task.")) {
                    // strip the prefix to get the handler registration name
                    name = name.substring("xxl.job.task.".length()).trim();
                    String value = (String) entry.getValue();
                    String orignValue = value;  // original value, kept for error reporting
                    if (value != null) {
                        value = value.trim();
                        if (!value.equals("")) {
                            try {
                                IJobHandler abstractDB2ESXXJobHandler = (IJobHandler) Class.forName(value).newInstance();
                                XxlJobExecutor.registJobHandler(name, abstractDB2ESXXJobHandler);
                            } catch (Exception e) {
                                // a bad entry is logged and skipped; remaining handlers still register
                                logger.error(new StringBuilder().append("registJobHandler [").append(orineName).append("=").append(orignValue).append("] failed:").toString(), e);
                            }
                        }
                    }
                }
            }
        }
        // load executor prop
        // Properties xxlJobProp = loadProperties ( " xxl - job - executor . properties " ) ;
        // init executor from the xxl.job.* properties
        xxlJobExecutor = new XxlJobExecutor();
        xxlJobExecutor.setAdminAddresses(propertiesContainer.getProperty("xxl.job.admin.addresses"));
        xxlJobExecutor.setAppName(propertiesContainer.getProperty("xxl.job.executor.appname"));
        xxlJobExecutor.setIp(propertiesContainer.getProperty("xxl.job.executor.ip"));
        xxlJobExecutor.setPort(Integer.valueOf(propertiesContainer.getProperty("xxl.job.executor.port")));
        xxlJobExecutor.setAccessToken(propertiesContainer.getProperty("xxl.job.accessToken"));
        xxlJobExecutor.setLogPath(propertiesContainer.getProperty("xxl.job.executor.logpath"));
        xxlJobExecutor.setLogRetentionDays(Integer.valueOf(propertiesContainer.getProperty("xxl.job.executor.logretentiondays")));
        // start executor; a startup failure is logged, not propagated
        try {
            xxlJobExecutor.start();
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }
}
public class ZipUtils { /** * Unzips the given { @ link File ZIP file } to the specified { @ link File directory } .
* @ param zip { @ link File ZIP file } to unzip .
* @ param directory { @ link File } referring to the file system path location in which to
* unzip the { @ link File ZIP file } .
* @ throws IllegalArgumentException if { @ link File ZIP file } is { @ literal null }
* or the specified { @ link File directory } is not a valid directory .
* @ throws IOException if an IO error occurs while reading the { @ link File ZIP file } .
* @ throws SystemException if the { @ link File ZIP file } could not be read or its contents unzipped .
* @ see java . util . zip . ZipFile
* @ see java . io . File */
public static void unzip ( File zip , File directory ) throws IOException { } } | Assert . notNull ( zip , "ZIP file is required" ) ; Assert . isTrue ( FileUtils . createDirectory ( directory ) , String . format ( "[%s] is not a valid directory" , directory ) ) ; ZipFile zipFile = new ZipFile ( zip , ZipFile . OPEN_READ ) ; zipFile . stream ( ) . forEach ( zipEntry -> { if ( zipEntry . isDirectory ( ) ) { Assert . state ( FileUtils . createDirectory ( new File ( directory , zipEntry . getName ( ) ) ) , newSystemException ( "Failed to create directory [%s] for ZIP entry" , zipEntry . getName ( ) ) ) ; } else { DataInputStream entryInputStream = null ; DataOutputStream entryOutputStream = null ; try { File zipEntryFile = new File ( directory , zipEntry . getName ( ) ) ; Assert . state ( FileUtils . createDirectory ( zipEntryFile . getParentFile ( ) ) , newSystemException ( "Failed to create directory [%1$s] for entry [%2$s]" , zipEntryFile . getParent ( ) , zipEntry . getName ( ) ) ) ; Assert . state ( zipEntryFile . createNewFile ( ) , newSystemException ( "Filed to create file [%1$s] for entry [%2$s]" , zipEntryFile , zipEntry . getName ( ) ) ) ; entryInputStream = new DataInputStream ( zipFile . getInputStream ( zipEntry ) ) ; entryOutputStream = new DataOutputStream ( new FileOutputStream ( zipEntryFile ) ) ; IOUtils . copy ( entryInputStream , entryOutputStream ) ; } catch ( IOException cause ) { throw newSystemException ( cause , "Failed to unzip entry [%s]" , zipEntry . getName ( ) ) ; } finally { IOUtils . close ( entryInputStream ) ; IOUtils . close ( entryOutputStream ) ; } } } ) ; |
public class DynamicWeightedMajority {

    /**
     * Trains the Dynamic Weighted Majority ensemble on one instance: collects
     * weighted votes from every expert, periodically (every
     * {@code periodOption} epochs) demotes wrong experts by the beta factor,
     * normalizes weights, prunes low-weight experts, adds a new expert when
     * the ensemble prediction was wrong (evicting the weakest expert if the
     * ensemble is full), and finally trains all experts on the instance.
     */
    @Override
    public void trainOnInstanceImpl(Instance inst) {
        this.epochs++;
        // Pr accumulates the weighted vote mass per class label
        double[] Pr = new double[inst.numClasses()];
        double maxWeight = 0.0;
        double weakestExpertWeight = 1.0;
        int weakestExpertIndex = -1;
        // Loop over experts
        for (int i = 0; i < this.experts.size(); i++) {
            double[] pr = this.experts.get(i).getVotesForInstance(inst);
            int yHat = Utils.maxIndex(pr);
            // every periodOption epochs, multiply a wrong expert's weight by beta
            if ((yHat != (int) inst.classValue()) && this.epochs % this.periodOption.getValue() == 0) {
                this.weights.set(i, this.weights.get(i) * this.betaOption.getValue());
            }
            Pr[yHat] += this.weights.get(i);
            maxWeight = Math.max(maxWeight, this.weights.get(i));
            // track the weakest expert in case one must be evicted below
            if (this.weights.get(i) < weakestExpertWeight) {
                weakestExpertIndex = i;
                weakestExpertWeight = weights.get(i);
            }
        }
        // ensemble prediction = class with the largest weighted vote mass
        int yHat = Utils.maxIndex(Pr);
        if (this.epochs % this.periodOption.getValue() == 0) {
            scaleWeights(maxWeight);
            removeExperts();
            if (yHat != (int) inst.classValue()) {
                // wrong ensemble prediction: add a fresh expert with weight 1.0,
                // evicting the weakest one first if the ensemble is at capacity
                if (experts.size() == this.maxExpertsOption.getValue()) {
                    removeWeakestExpert(weakestExpertIndex);
                }
                Classifier classifier = ((Classifier) getPreparedClassOption(this.baseLearnerOption)).copy();
                classifier.resetLearning();
                this.experts.add(classifier);
                this.weights.add(1.0);
            }
        }
        // train experts
        for (Classifier expert : this.experts) {
            expert.trainOnInstance(inst);
        }
    }
}
public class Scanner {

    /**
     * Scans the {@link #input}.
     *
     * NOTE: the body below is Ragel machine-generated code (see the
     * "Scanner.java.rl" line markers); do not edit it by hand. It drives a
     * table-based state machine over the input characters and executes the
     * numbered actions to build selectors.
     *
     * @return A list of selector groups that contain a list of {@link Selector}s scanned.
     * @throws ScannerException If the input is invalid.
     */
    public List<List<Selector>> scan() throws ScannerException {
        char[] data = input.toCharArray();
        int cs;
        int top;
        int[] stack = new int[32];
        int eof = data.length;
        int p = 0;       // current position in data
        int pe = eof;    // end position
        int mark = 0;    // start of the slice captured by actions
        LinkedList<List<Selector>> selectors = new LinkedList<List<Selector>>();
        // List<Selector> parts = new LinkedList<Selector>();
        List<Selector> parts = null;
        String tagName = Selector.UNIVERSAL_TAG;
        String negationTagName = Selector.UNIVERSAL_TAG;
        Selector.Combinator combinator = null;
        List<Specifier> specifiers = new LinkedList<Specifier>();
        String attributeName = null;
        String attributeValue = null;
        AttributeSpecifier.Match attributeMatch = null;
        String pseudoNthClass = null;
        boolean isNegation = false;
        Selector negationSelector = null;
        // line 1538 "../java/se/fishtank/css/selectors/scanner/Scanner.java"
        {
            cs = Scanner_start;
            top = 0;
        }
        // line 221 "Scanner.java.rl"
        // line 1546 "../java/se/fishtank/css/selectors/scanner/Scanner.java"
        {
            int _klen;
            int _trans = 0;
            int _acts;
            int _nacts;
            int _keys;
            int _goto_targ = 0;
            _goto: while (true) {
                switch (_goto_targ) {
                case 0:
                    if (p == pe) { _goto_targ = 4; continue _goto; }
                    if (cs == 0) { _goto_targ = 5; continue _goto; }
                case 1:
                    // binary-search the transition tables for the current character
                    _match: do {
                        _keys = _Scanner_key_offsets[cs];
                        _trans = _Scanner_index_offsets[cs];
                        _klen = _Scanner_single_lengths[cs];
                        if (_klen > 0) {
                            int _lower = _keys;
                            int _mid;
                            int _upper = _keys + _klen - 1;
                            while (true) {
                                if (_upper < _lower) break;
                                _mid = _lower + ((_upper - _lower) >> 1);
                                if (data[p] < _Scanner_trans_keys[_mid]) _upper = _mid - 1;
                                else if (data[p] > _Scanner_trans_keys[_mid]) _lower = _mid + 1;
                                else { _trans += (_mid - _keys); break _match; }
                            }
                            _keys += _klen;
                            _trans += _klen;
                        }
                        _klen = _Scanner_range_lengths[cs];
                        if (_klen > 0) {
                            int _lower = _keys;
                            int _mid;
                            int _upper = _keys + (_klen << 1) - 2;
                            while (true) {
                                if (_upper < _lower) break;
                                _mid = _lower + (((_upper - _lower) >> 1) & ~1);
                                if (data[p] < _Scanner_trans_keys[_mid]) _upper = _mid - 2;
                                else if (data[p] > _Scanner_trans_keys[_mid + 1]) _lower = _mid + 2;
                                else { _trans += ((_mid - _keys) >> 1); break _match; }
                            }
                            _trans += _klen;
                        }
                    } while (false);
                    _trans = _Scanner_indicies[_trans];
                    cs = _Scanner_trans_targs[_trans];
                    if (_Scanner_trans_actions[_trans] != 0) {
                        _acts = _Scanner_trans_actions[_trans];
                        _nacts = (int) _Scanner_actions[_acts++];
                        while (_nacts-- > 0) {
                            switch (_Scanner_actions[_acts++]) {
                            case 0:
                                // line 44 "Scanner.java.rl" — finish an [attr] specifier
                                {
                                    AttributeSpecifier specifier;
                                    if (attributeValue != null) {
                                        specifier = new AttributeSpecifier(attributeName, attributeValue, attributeMatch);
                                    } else {
                                        specifier = new AttributeSpecifier(attributeName);
                                    }
                                    specifiers.add(specifier);
                                }
                                break;
                            case 1:
                                // line 55 "Scanner.java.rl" — capture attribute name
                                { attributeName = getSlice(mark, p); }
                                break;
                            case 2:
                                // line 59 "Scanner.java.rl" — capture attribute match operator
                                {
                                    String m = getSlice(mark, p);
                                    if ("=".equals(m)) {
                                        attributeMatch = AttributeSpecifier.Match.EXACT;
                                    } else if ("~=".equals(m)) {
                                        attributeMatch = AttributeSpecifier.Match.LIST;
                                    } else if ("|=".equals(m)) {
                                        attributeMatch = AttributeSpecifier.Match.HYPHEN;
                                    } else if ("^=".equals(m)) {
                                        attributeMatch = AttributeSpecifier.Match.PREFIX;
                                    } else if ("$=".equals(m)) {
                                        attributeMatch = AttributeSpecifier.Match.SUFFIX;
                                    } else if ("*=".equals(m)) {
                                        attributeMatch = AttributeSpecifier.Match.CONTAINS;
                                    }
                                }
                                break;
                            case 3:
                                // line 76 "Scanner.java.rl" — capture attribute value (strip quotes)
                                {
                                    String value = getSlice(mark, p);
                                    if (value.charAt(0) == '"' || value.charAt(0) == '\'') {
                                        value = value.substring(1, value.length() - 1);
                                    }
                                    attributeValue = value;
                                }
                                break;
                            case 4:
                                // line 85 "Scanner.java.rl" — .class specifier
                                { specifiers.add(new AttributeSpecifier("class", getSlice(mark, p), AttributeSpecifier.Match.LIST)); }
                                break;
                            case 5:
                                // line 90 "Scanner.java.rl" — combinator between simple selectors
                                {
                                    switch (data[p]) {
                                    case ' ':
                                        combinator = Selector.Combinator.DESCENDANT;
                                        break;
                                    case '>':
                                        combinator = Selector.Combinator.CHILD;
                                        break;
                                    case '+':
                                        combinator = Selector.Combinator.ADJACENT_SIBLING;
                                        break;
                                    case '~':
                                        combinator = Selector.Combinator.GENERAL_SIBLING;
                                        break;
                                    }
                                }
                                break;
                            case 6:
                                // line 107 "Scanner.java.rl" — start a new selector group
                                { parts = new LinkedList<Selector>(); }
                                break;
                            case 7:
                                // line 111 "Scanner.java.rl" — finish the current selector group
                                { selectors.add(parts); }
                                break;
                            case 8:
                                // line 115 "Scanner.java.rl" — #id specifier
                                { specifiers.add(new AttributeSpecifier("id", getSlice(mark, p), AttributeSpecifier.Match.EXACT)); }
                                break;
                            case 9:
                                // line 120 "Scanner.java.rl" — mark slice start
                                { mark = p; }
                                break;
                            case 10:
                                // line 124 "Scanner.java.rl" — entering :not(...)
                                { isNegation = true; }
                                break;
                            case 11:
                                // line 128 "Scanner.java.rl" — leaving :not(...)
                                { specifiers.add(new NegationSpecifier(negationSelector)); isNegation = false; }
                                break;
                            case 12:
                                // line 133 "Scanner.java.rl" — :pseudo-class specifier
                                { specifiers.add(new PseudoClassSpecifier(getSlice(mark, p))); }
                                break;
                            case 13:
                                // line 137 "Scanner.java.rl" — :nth-*(an+b) argument
                                { specifiers.add(new PseudoNthSpecifier(pseudoNthClass, getSlice(mark, p))); }
                                break;
                            case 14:
                                // line 141 "Scanner.java.rl" — :nth-* class name
                                { pseudoNthClass = getSlice(mark, p); }
                                break;
                            case 15:
                                // line 145 "Scanner.java.rl" — finish a simple selector, reset state
                                {
                                    Selector selector;
                                    List<Specifier> list = specifiers.isEmpty() ? null : specifiers;
                                    if (isNegation) {
                                        negationSelector = new Selector(negationTagName, list);
                                    } else {
                                        if (combinator == null) {
                                            selector = new Selector(tagName, list);
                                        } else {
                                            selector = new Selector(tagName, combinator, list);
                                        }
                                        parts.add(selector);
                                        tagName = Selector.UNIVERSAL_TAG;
                                        combinator = null;
                                    }
                                    negationTagName = Selector.UNIVERSAL_TAG;
                                    attributeName = null;
                                    attributeValue = null;
                                    attributeMatch = null;
                                    pseudoNthClass = null;
                                    specifiers = new LinkedList<Specifier>();
                                }
                                break;
                            case 16:
                                // line 170 "Scanner.java.rl" — capture tag name
                                {
                                    if (isNegation) {
                                        negationTagName = getSlice(mark, p);
                                    } else {
                                        tagName = getSlice(mark, p);
                                    }
                                }
                                break;
                            case 17:
                                // line 28 "ScannerCommon.rl" — fcall: push state, jump
                                { { stack[top++] = cs; cs = 150; _goto_targ = 2; if (true) continue _goto; } }
                            case 18:
                                // line 42 "ScannerCommon.rl" — fret: pop state
                                { { cs = stack[--top]; _goto_targ = 2; if (true) continue _goto; } }
                                // line 1802 "../java/se/fishtank/css/selectors/scanner/Scanner.java"
                            }
                        }
                    }
                case 2:
                    if (cs == 0) { _goto_targ = 5; continue _goto; }
                    if (++p != pe) { _goto_targ = 1; continue _goto; }
                case 4:
                    // end-of-input: run any pending EOF actions for the final state
                    if (p == eof) {
                        int __acts = _Scanner_eof_actions[cs];
                        int __nacts = (int) _Scanner_actions[__acts++];
                        while (__nacts-- > 0) {
                            switch (_Scanner_actions[__acts++]) {
                            case 4:
                                // line 85 "Scanner.java.rl"
                                { specifiers.add(new AttributeSpecifier("class", getSlice(mark, p), AttributeSpecifier.Match.LIST)); }
                                break;
                            case 7:
                                // line 111 "Scanner.java.rl"
                                { selectors.add(parts); }
                                break;
                            case 8:
                                // line 115 "Scanner.java.rl"
                                { specifiers.add(new AttributeSpecifier("id", getSlice(mark, p), AttributeSpecifier.Match.EXACT)); }
                                break;
                            case 12:
                                // line 133 "Scanner.java.rl"
                                { specifiers.add(new PseudoClassSpecifier(getSlice(mark, p))); }
                                break;
                            case 15:
                                // line 145 "Scanner.java.rl"
                                {
                                    Selector selector;
                                    List<Specifier> list = specifiers.isEmpty() ? null : specifiers;
                                    if (isNegation) {
                                        negationSelector = new Selector(negationTagName, list);
                                    } else {
                                        if (combinator == null) {
                                            selector = new Selector(tagName, list);
                                        } else {
                                            selector = new Selector(tagName, combinator, list);
                                        }
                                        parts.add(selector);
                                        tagName = Selector.UNIVERSAL_TAG;
                                        combinator = null;
                                    }
                                    negationTagName = Selector.UNIVERSAL_TAG;
                                    attributeName = null;
                                    attributeValue = null;
                                    attributeMatch = null;
                                    pseudoNthClass = null;
                                    specifiers = new LinkedList<Specifier>();
                                }
                                break;
                            case 16:
                                // line 170 "Scanner.java.rl"
                                {
                                    if (isNegation) {
                                        negationTagName = getSlice(mark, p);
                                    } else {
                                        tagName = getSlice(mark, p);
                                    }
                                }
                                break;
                                // line 1886 "../java/se/fishtank/css/selectors/scanner/Scanner.java"
                            }
                        }
                    }
                case 5:
                }
                break;
            }
        }
        // line 222 "Scanner.java.rl"
        if (cs < Scanner_first_final && p != pe) {
            // TODO: Better error reporting ;)
            throw new ScannerException("Bad input!");
        }
        return selectors;
    }
}
public class TagContextBuilder { /** * Adds the key / value pair and metadata regardless of whether the key is present .
* @ param key the { @ code TagKey } which will be set .
* @ param value the { @ code TagValue } to set for the given key .
* @ param tagMetadata the { @ code TagMetadata } associated with this { @ link Tag } .
* @ return this
* @ since 0.20 */
public TagContextBuilder put ( TagKey key , TagValue value , TagMetadata tagMetadata ) { } } | @ SuppressWarnings ( "deprecation" ) TagContextBuilder builder = put ( key , value ) ; return builder ; |
public class MappingFilterParser {

    /**
     * ANTLR-generated rule: {@code type returns [TreeModelFilter<OBDAMappingAxiom> value]
     * : (ID | TEXT | TARGET | SOURCE | FUNCT | PRED);}
     *
     * Maps the next filter-type token to the corresponding tree model filter.
     *
     * @return the filter for the matched token, or {@code null} if recovery
     *         from a recognition error left no alternative matched
     */
    public final TreeModelFilter<SQLPPTriplesMap> type() throws RecognitionException {
        TreeModelFilter<SQLPPTriplesMap> value = null;
        try {
            // decide which alternative to take from one token of lookahead
            int alt4 = 6;
            switch (input.LA(1)) {
                case ID: {
                    alt4 = 1;
                }
                break;
                case TEXT: {
                    alt4 = 2;
                }
                break;
                case TARGET: {
                    alt4 = 3;
                }
                break;
                case SOURCE: {
                    alt4 = 4;
                }
                break;
                case FUNCT: {
                    alt4 = 5;
                }
                break;
                case PRED: {
                    alt4 = 6;
                }
                break;
                default:
                    NoViableAltException nvae = new NoViableAltException("", 4, 0, input);
                    throw nvae;
            }
            switch (alt4) {
                case 1:
                    // ID -> filter by mapping id
                    {
                        match(input, ID, FOLLOW_ID_in_type120);
                        value = new MappingIDTreeModelFilter();
                    }
                    break;
                case 2:
                    // TEXT -> filter by mapping text
                    {
                        match(input, TEXT, FOLLOW_TEXT_in_type133);
                        value = new MappingStringTreeModelFilter();
                    }
                    break;
                case 3:
                    // TARGET -> filter by target (head) variables
                    {
                        match(input, TARGET, FOLLOW_TARGET_in_type144);
                        value = new MappingHeadVariableTreeModelFilter();
                    }
                    break;
                case 4:
                    // SOURCE -> filter by source SQL string
                    {
                        match(input, SOURCE, FOLLOW_SOURCE_in_type153);
                        value = new MappingSQLStringTreeModelFilter();
                    }
                    break;
                case 5:
                    // FUNCT -> filter by functor
                    {
                        match(input, FUNCT, FOLLOW_FUNCT_in_type162);
                        value = new MappingFunctorTreeModelFilter();
                    }
                    break;
                case 6:
                    // PRED -> filter by predicate
                    {
                        match(input, PRED, FOLLOW_PRED_in_type172);
                        value = new MappingPredicateTreeModelFilter();
                    }
                    break;
            }
        } catch (RecognitionException re) {
            reportError(re);
            recover(input, re);
        } finally {
            // do for sure before leaving
        }
        return value;
    }
}
public class FileManager { /** * Called before starting generating proxies files . Create the
* { @ code java4cpp . log } log file , and manage the { @ code clean } and
* { @ code useHash } settings . */
public void start ( ) { } } | addSymbolsFromSettings ( ) ; File rep = new File ( context . getSettings ( ) . getTargetPath ( ) ) ; rep . mkdirs ( ) ; try { java4cppLog = new FileWriter ( new File ( getPath ( JAVA4CPP_LOG ) ) ) ; } catch ( IOException e ) { System . err . println ( "Can't create log file: " + e . getMessage ( ) ) ; } try { File [ ] existings = rep . listFiles ( new SourceFilter ( ) ) ; if ( existings != null ) { oldFiles = new ArrayList < File > ( Arrays . asList ( existings ) ) ; } java4cppHash = new File ( getPath ( JAVA4CPP_HASH ) ) ; BufferedInputStream in = new BufferedInputStream ( new FileInputStream ( java4cppHash ) ) ; oldHashes . load ( in ) ; in . close ( ) ; java4cppHash . delete ( ) ; } catch ( IOException e ) { logInfo ( "no java4cpp.hash file, regenerating all files" ) ; } |
public class AbstractSuppressionAnalyzer { /** * The prepare method loads the suppression XML file .
* @ param engine a reference the dependency - check engine
* @ throws InitializationException thrown if there is an exception */
@ Override public synchronized void prepareAnalyzer ( Engine engine ) throws InitializationException { } } | if ( rules . isEmpty ( ) ) { try { loadSuppressionBaseData ( ) ; } catch ( SuppressionParseException ex ) { throw new InitializationException ( "Error initializing the suppression analyzer: " + ex . getLocalizedMessage ( ) , ex , true ) ; } try { loadSuppressionData ( ) ; } catch ( SuppressionParseException ex ) { throw new InitializationException ( "Warn initializing the suppression analyzer: " + ex . getLocalizedMessage ( ) , ex , false ) ; } } |
public class FacesBackingBeanFactory { /** * Get a FacesBackingBean instance , given a FacesBackingBean class .
* @ param beanClass the Class , which must be assignable to { @ link FacesBackingBean } .
* @ return a new FacesBackingBean instance . */
public FacesBackingBean getFacesBackingBeanInstance ( Class beanClass ) throws InstantiationException , IllegalAccessException { } } | assert FacesBackingBean . class . isAssignableFrom ( beanClass ) : "Class " + beanClass . getName ( ) + " does not extend " + FacesBackingBean . class . getName ( ) ; return ( FacesBackingBean ) beanClass . newInstance ( ) ; |
public class ProfileController { /** * Display the current user ' s approvals */
@ RequestMapping ( value = "/profile" , method = RequestMethod . GET ) public String get ( Authentication authentication , Model model ) { } } | Map < String , List < DescribedApproval > > approvals = getCurrentApprovalsForUser ( getCurrentUserId ( ) ) ; Map < String , String > clientNames = getClientNames ( approvals ) ; model . addAttribute ( "clientnames" , clientNames ) ; model . addAttribute ( "approvals" , approvals ) ; model . addAttribute ( "isUaaManagedUser" , isUaaManagedUser ( authentication ) ) ; return "approvals" ; |
public class HttpHealthCheckClient { /** * Deletes the specified HttpHealthCheck resource .
* < p > Sample code :
* < pre > < code >
* try ( HttpHealthCheckClient httpHealthCheckClient = HttpHealthCheckClient . create ( ) ) {
* ProjectGlobalHttpHealthCheckName httpHealthCheck = ProjectGlobalHttpHealthCheckName . of ( " [ PROJECT ] " , " [ HTTP _ HEALTH _ CHECK ] " ) ;
* Operation response = httpHealthCheckClient . deleteHttpHealthCheck ( httpHealthCheck ) ;
* < / code > < / pre >
* @ param httpHealthCheck Name of the HttpHealthCheck resource to delete .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final Operation deleteHttpHealthCheck ( ProjectGlobalHttpHealthCheckName httpHealthCheck ) { } } | DeleteHttpHealthCheckHttpRequest request = DeleteHttpHealthCheckHttpRequest . newBuilder ( ) . setHttpHealthCheck ( httpHealthCheck == null ? null : httpHealthCheck . toString ( ) ) . build ( ) ; return deleteHttpHealthCheck ( request ) ; |
public class MsgSettingController { /** * Gets validation data lists .
* @ param req the req
* @ return the validation data lists */
@ GetMapping ( "/setting/param/from/url" ) public List < ValidationData > getValidationDataLists ( HttpServletRequest req ) { } } | this . validationSessionComponent . sessionCheck ( req ) ; ValidationData data = ParameterMapper . requestParamaterToObject ( req , ValidationData . class , "UTF-8" ) ; return this . msgSettingService . getValidationData ( data . getParamType ( ) , data . getMethod ( ) , data . getUrl ( ) ) ; |
public class ParameterMetaData { /** * { @ inheritDoc } */
public int getPrecision ( final int param ) throws SQLException { } } | try { return this . parameters . get ( param - 1 ) . precision ; } catch ( NullPointerException e ) { throw new SQLException ( "Parameter is not set: " + param ) ; } catch ( IndexOutOfBoundsException out ) { throw new SQLException ( "Parameter out of bounds: " + param ) ; } // end of catch |
public class CmsModelGroupHelper { /** * Returns the model group base element . < p >
* @ param modelGroupPage the model group page
* @ param modelGroupResource the model group resource
* @ return the base element */
private CmsContainerElementBean getModelBaseElement ( CmsContainerPageBean modelGroupPage , CmsResource modelGroupResource ) { } } | CmsContainerElementBean result = null ; for ( CmsContainerElementBean element : modelGroupPage . getElements ( ) ) { if ( CmsContainerElement . ModelGroupState . isModelGroup . name ( ) . equals ( element . getIndividualSettings ( ) . get ( CmsContainerElement . MODEL_GROUP_STATE ) ) ) { result = element ; break ; } } return result ; |
public class Transforms { /** * Sigmoid function
* @ param ndArray
* @ param dup
* @ return */
public static INDArray sigmoid ( INDArray ndArray , boolean dup ) { } } | return exec ( dup ? new Sigmoid ( ndArray , ndArray . ulike ( ) ) : new Sigmoid ( ndArray ) ) ; |
public class DynamicVariableSet { /** * Gets the first variable index which can contain a replicated
* variable for the { @ code plateNum } th plate . The returned index is
* inclusive and may be used by the plate .
* @ param plateNum
* @ return */
private int getPlateStartIndex ( int plateNum ) { } } | Preconditions . checkArgument ( plateNum >= 0 && plateNum < plateNames . size ( ) ) ; int startOffset = fixedVariableMaxInd + 1 ; for ( int i = 0 ; i < plateNum ; i ++ ) { startOffset += plates . get ( i ) . getMaximumPlateSize ( ) * maximumReplications [ i ] ; } return startOffset ; |
public class AbstractDataBinder { /** * Returns , whether the data , which corresponds to a specific key , is currently cached , or not .
* @ param key
* The key , which corresponds to the data , which should be checked , as an instance of
* the generic type KeyType . The key may not be null
* @ return True , if the data , which corresponds to the given key , is currently cached , false
* otherwise */
public final boolean isCached ( @ NonNull final KeyType key ) { } } | Condition . INSTANCE . ensureNotNull ( key , "The key may not be null" ) ; synchronized ( cache ) { return cache . get ( key ) != null ; } |
public class ReflectionUtil { /** * Create an Uploader from its fully qualified class name .
* The class passed in by name must be assignable to Uploader .
* See the secor . upload . class config option .
* @ param className The class name of a subclass of Uploader
* @ return an UploadManager instance with the runtime type of the class passed by name
* @ throws Exception on error */
public static Uploader createUploader ( String className ) throws Exception { } } | Class < ? > clazz = Class . forName ( className ) ; if ( ! Uploader . class . isAssignableFrom ( clazz ) ) { throw new IllegalArgumentException ( String . format ( "The class '%s' is not assignable to '%s'." , className , Uploader . class . getName ( ) ) ) ; } return ( Uploader ) clazz . newInstance ( ) ; |
public class RuntimeMojoSupport { /** * Ensure Maven compatibility . Requires Maven 3 + */
private void ensureMavenCompatibility ( final ClassLoader classLoader ) throws MojoExecutionException { } } | Version mavenVersion = mavenVersionHelper . detectVersion ( classLoader ) ; if ( mavenVersion == null ) { // complain and continue
log . error ( "Unable to determine Maven version" ) ; } else { log . debug ( "Detected Maven version: {}" , mavenVersion ) ; if ( versionHelper . before ( 3 ) . containsVersion ( mavenVersion ) ) { throw new MojoExecutionException ( "Unsupported Maven version: " + mavenVersion ) ; } } |
public class CmsUserSettingsStringPropertyWrapper { /** * Gets the time warp .
* @ return the time warp */
@ PrefMetadata ( type = CmsTimeWarpPreference . class ) public String getTimeWarp ( ) { } } | long warp = m_settings . getTimeWarp ( ) ; return warp < 0 ? "" : "" + warp ; // if timewarp < 0 ( i . e . time warp is not set ) , use the empty string because we don ' t want the date selector widget to interpret the negative value |
/**
 * Looks up the primary-key column name of the given table via the MySQL
 * information_schema.
 *
 * @param tableName the table whose primary-key column name is queried
 * @return a Single emitting the primary-key column name, or the empty string
 *         when the table has no primary key
 */
private Single<String> getIdColName(String tableName) {
    String alias = "idColumnName";
    return new CustomSelectAction() {
        @Override
        protected String patternSql() {
            // NOTE(review): tableName is concatenated directly into the SQL text;
            // if it can ever come from untrusted input this is injectable —
            // confirm all callers pass trusted, schema-derived names.
            return String.format("SELECT k.COLUMN_NAME as %s\n"
                    + "FROM information_schema.table_constraints t\n"
                    + "LEFT JOIN information_schema.key_column_usage k\n"
                    + "USING(constraint_name,table_schema,table_name)\n"
                    + "WHERE t.constraint_type='PRIMARY KEY'\n"
                    + " AND t.table_schema=DATABASE() \n"
                    + " AND t.table_name='%s';", alias, tableName);
        }

        @Override
        public int resultType() {
            return SELECT_SINGLE;
        }
    }.execute(null, null, connection, MsgIdHolder.get())
            .map(unitResponse -> {
                SingleRecordSelectionResult autoIncrement = unitResponse.getData();
                if (autoIncrement.getCount() == 0) {
                    // no primary key: emit "" instead of signalling an error
                    LOG.info("No primary key exists in table " + tableName);
                    return "";
                }
                return autoIncrement.getRecord().get(alias).toString();
            });
}
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public String convertIfcElementCompositionEnumToString ( EDataType eDataType , Object instanceValue ) { } } | return instanceValue == null ? null : instanceValue . toString ( ) ; |
public class OptionsSpiderPanel { /** * This method initializes the Parse robots . txt checkbox .
* @ return javax . swing . JCheckBox */
private JCheckBox getChkParseRobotsTxt ( ) { } } | if ( parseRobotsTxt == null ) { parseRobotsTxt = new JCheckBox ( ) ; parseRobotsTxt . setText ( Constant . messages . getString ( "spider.options.label.robotstxt" ) ) ; } return parseRobotsTxt ; |
public class UpdateCenter { /** * Gets the plugin with the given name from the first { @ link UpdateSite } to contain it .
* @ return Discovered { @ link Plugin } . { @ code null } if it cannot be found */
public @ CheckForNull Plugin getPlugin ( String artifactId ) { } } | for ( UpdateSite s : sites ) { Plugin p = s . getPlugin ( artifactId ) ; if ( p != null ) return p ; } return null ; |
public class ConsonantUtil { /** * * * * * * BEGINNING OF FUNCTION * * * * * */
/ * / public static boolean is_tavarga ( String str ) { } } | if ( str . equals ( "t" ) || str . equals ( "T" ) || str . equals ( "d" ) || str . equals ( "D" ) || str . equals ( "n" ) ) return true ; return false ; |
public class JobScheduleTerminateOptions { /** * Set a timestamp indicating the last modified time of the resource known to the client . The operation will be performed only if the resource on the service has not been modified since the specified time .
* @ param ifUnmodifiedSince the ifUnmodifiedSince value to set
* @ return the JobScheduleTerminateOptions object itself . */
public JobScheduleTerminateOptions withIfUnmodifiedSince ( DateTime ifUnmodifiedSince ) { } } | if ( ifUnmodifiedSince == null ) { this . ifUnmodifiedSince = null ; } else { this . ifUnmodifiedSince = new DateTimeRfc1123 ( ifUnmodifiedSince ) ; } return this ; |
/**
 * Found a name in a statement context. If it's a label, we gather up any
 * following labels and the next non-label statement into a
 * {@link LabeledStatement} "bundle" and return that. Otherwise we parse an
 * expression and return it wrapped in an {@link ExpressionStatement}.
 *
 * @return either an ExpressionStatement or a LabeledStatement bundle
 * @throws IOException propagated from the token stream
 */
private AstNode nameOrLabel() throws IOException {
    if (currentToken != Token.NAME) throw codeBug();
    int pos = ts.tokenBeg;
    // set check for label and call down to primaryExpr
    currentFlaggedToken |= TI_CHECK_LABEL;
    AstNode expr = expr();
    if (expr.getType() != Token.LABEL) {
        // plain expression statement, not a label
        AstNode n = new ExpressionStatement(expr, !insideFunction());
        n.lineno = expr.lineno;
        return n;
    }
    LabeledStatement bundle = new LabeledStatement(pos);
    recordLabel((Label) expr, bundle);
    bundle.setLineno(ts.lineno);
    // look for more labels
    AstNode stmt = null;
    while (peekToken() == Token.NAME) {
        currentFlaggedToken |= TI_CHECK_LABEL;
        expr = expr();
        if (expr.getType() != Token.LABEL) {
            // a non-label expression ends the label run and becomes the statement
            stmt = new ExpressionStatement(expr, !insideFunction());
            autoInsertSemicolon(stmt);
            break;
        }
        recordLabel((Label) expr, bundle);
    }
    // no more labels; now parse the labeled statement
    try {
        currentLabel = bundle;
        if (stmt == null) {
            stmt = statementHelper();
            int ntt = peekToken();
            // attach a trailing same-line comment to the statement, if any
            if (ntt == Token.COMMENT
                    && stmt.getLineno() == scannedComments.get(scannedComments.size() - 1).getLineno()) {
                stmt.setInlineComment(scannedComments.get(scannedComments.size() - 1));
                consumeToken();
            }
        }
    } finally {
        currentLabel = null;
        // remove the labels for this statement from the global set
        for (Label lb : bundle.getLabels()) {
            labelSet.remove(lb.getName());
        }
    }
    // If stmt has parent assigned its position already is relative
    // (See bug #710225)
    bundle.setLength(stmt.getParent() == null ? getNodeEnd(stmt) - pos : getNodeEnd(stmt));
    bundle.setStatement(stmt);
    return bundle;
}
/**
 * Reads the response headers from the given {@link HttpConnection connection}.
 * Subclasses may want to override this method to customize the processing.
 *
 * "It must be possible to combine the multiple header fields into one
 * 'field-name: field-value' pair, without changing the semantics of the
 * message, by appending each subsequent field-value to the first, each
 * separated by a comma." - HTTP/1.0 (4.3)
 *
 * @param state the {@link HttpState state} information associated with this method
 * @param conn the {@link HttpConnection connection} used to execute this HTTP method
 * @throws IOException if an I/O (transport) error occurs
 * @throws HttpException if a protocol exception occurs
 * @see #readResponse
 * @see #processResponseHeaders
 */
protected void readResponseHeaders(HttpState state, HttpConnection conn) throws IOException, HttpException {
    LOG.trace("enter HttpMethodBase.readResponseHeaders(HttpState," + "HttpConnection)");
    // drop any headers from a previous (e.g. redirected or retried) response
    getResponseHeaderGroup().clear();
    Header[] headers = HttpParser.parseHeaders(conn.getResponseInputStream(),
            getParams().getHttpElementCharset());
    // Wire logging moved to HttpParser
    getResponseHeaderGroup().setHeaders(headers);
}
public class XParser { /** * Parses the given file , and returns the XLog instances
* extracted . The file is first checked against this parser ,
* to check whether it can be handled . If the parser cannot
* handle the given file , or the extraction itself fails ,
* the parser should raise an < code > IOException < / code > .
* @ param file The file to be parsed .
* @ return List of XLog instances parsed from the given
* file .
* @ throws Exception Raised in case the parser fails , or the
* given file cannot be processed . */
public List < XLog > parse ( File file ) throws Exception { } } | if ( canParse ( file ) ) { InputStream is = new FileInputStream ( file ) ; return parse ( is ) ; } else { throw new IllegalArgumentException ( "Parser cannot handle this file!" ) ; } |
/**
 * {@inheritDoc}
 *
 * Builds the header content tree for the "Method Detail" section: appends the
 * section start marker to the member details tree, then returns a fresh member
 * tree containing the METHOD_DETAIL anchor and the section heading.
 */
@Override
public Content getMethodDetailsTreeHeader(TypeElement typeElement, Content memberDetailsTree) {
    memberDetailsTree.addContent(HtmlConstants.START_OF_METHOD_DETAILS);
    Content methodDetailsTree = writer.getMemberTreeHeader();
    // anchor so in-page links to the method detail section resolve
    methodDetailsTree.addContent(writer.getMarkerAnchor(SectionName.METHOD_DETAIL));
    Content heading = HtmlTree.HEADING(HtmlConstants.DETAILS_HEADING, contents.methodDetailLabel);
    methodDetailsTree.addContent(heading);
    return methodDetailsTree;
}
/**
 * Matches an Annotation AST node if the argument to the annotation with the given
 * name has a value which matches the given matcher. For example,
 * {@code hasArgumentWithValue("value", stringLiteral("one"))} matches
 * {@code @Thing("one")} or {@code @Thing({"one", "two"})} or
 * {@code @Thing(value = "one")}.
 *
 * @param argumentName the annotation argument name to inspect
 * @param valueMatcher the matcher applied to that argument's value
 * @return a matcher over AnnotationTree nodes
 */
public static Matcher<AnnotationTree> hasArgumentWithValue(String argumentName, Matcher<ExpressionTree> valueMatcher) {
    return new AnnotationHasArgumentWithValue(argumentName, valueMatcher);
}
public class SimonUtils { /** * Calls a block of code with stopwatch around , can not return any result or throw an exception
* ( use { @ link # doWithStopwatch ( String , java . util . concurrent . Callable ) } instead ) .
* @ param name name of the Stopwatch
* @ param runnable wrapped block of code
* @ since 3.0 */
public static void doWithStopwatch ( String name , Runnable runnable ) { } } | try ( Split split = SimonManager . getStopwatch ( name ) . start ( ) ) { runnable . run ( ) ; } |
public class CreateAlipayChargeParams { /** * A currency to give to the charge . Optional . < br >
* Default value is MXN < br >
* @ param currency
* @ return */
public CreateAlipayChargeParams currency ( final Currency currency ) { } } | return this . with ( "currency" , currency == null ? Currency . MXN . name ( ) : currency . name ( ) ) ; |
/**
 * Rounds a value to a fixed number of decimal places and returns it as a string.
 * For example, keeping four decimals: 123.456789 -> "123.4567" (subject to the
 * chosen rounding mode).
 *
 * @param v the value
 * @param scale the number of decimal places to keep
 * @param roundingMode the rounding mode, {@link RoundingMode}
 * @return the rounded value as a string
 * @since 3.2.2
 */
public static String roundStr(double v, int scale, RoundingMode roundingMode) {
    return round(v, scale, roundingMode).toString();
}
public class DistanceLearnerFactory { /** * find xxx in a string of the form xxx [ yyy = zzz ] or xxxx */
static private Class findClassFor ( String s ) throws ClassNotFoundException { } } | int endClassIndex = s . indexOf ( '[' ) ; if ( endClassIndex >= 0 ) s = s . substring ( 0 , endClassIndex ) ; try { return Class . forName ( s ) ; } catch ( ClassNotFoundException e ) { return Class . forName ( "com.wcohen.ss." + s ) ; } |
/**
 * Useful for reporting or maybe logging. Resolves all generics and composes the
 * resulting type as a string.
 * <pre>{@code class A extends B<Long>;
 * class B<T> {
 *     List<T> doSmth();
 * }}</pre>
 * Resolving parameters in type of root class:
 * {@code type(B.class).toStringType(B.class.getMethod("doSmth").getGenericReturnType()) == "List<Long>"}
 *
 * Checks if the type contains generics belonging to a different context in the
 * current hierarchy and automatically changes context to properly resolve
 * generics. Fails when it is impossible to correctly resolve generics
 * (preventing incorrect usage).
 *
 * @param type type to get the string of
 * @return string representation for the resolved type
 * @throws WrongGenericsContextException if the type contains generics not visible from the current class
 */
public String toStringType(final Type type) {
    // chooseContext() switches to whichever hierarchy context can resolve this type
    return TypeToStringUtils.toStringType(type, chooseContext(type).contextGenerics());
}
public class MapViewerTemplate { /** * initializes the map view position .
* @ param mvp the map view position to be set
* @ return the mapviewposition set */
protected IMapViewPosition initializePosition ( IMapViewPosition mvp ) { } } | LatLong center = mvp . getCenter ( ) ; if ( center . equals ( new LatLong ( 0 , 0 ) ) ) { mvp . setMapPosition ( this . getInitialPosition ( ) ) ; } mvp . setZoomLevelMax ( getZoomLevelMax ( ) ) ; mvp . setZoomLevelMin ( getZoomLevelMin ( ) ) ; return mvp ; |
/**
 * Parses the parameters of the given request query part, optionally decodes
 * them, and creates a parameter map out of them.<p>
 *
 * Please note: This does not parse a full request URI/URL, only the query part
 * that starts after the "?". If the given String is empty, an empty map is
 * returned.<p>
 *
 * @param query the query to parse
 * @param decodeParameters a flag indicating if the parameters should be decoded
 * @param encoding the character encoding used while decoding; if null, the default character encoding is used
 * @return the parameter map created from the query
 */
public static Map<String, String[]> createParameterMap(String query, boolean decodeParameters, String encoding) {
    if (CmsStringUtil.isEmpty(query)) {
        // empty query
        return new HashMap<String, String[]>();
    }
    if (query.charAt(0) == URL_DELIMITER.charAt(0)) {
        // remove leading '?' if required
        query = query.substring(1);
    }
    // cut along the different parameters
    String[] params = CmsStringUtil.splitAsArray(query, PARAMETER_DELIMITER);
    Map<String, String[]> parameters = new HashMap<String, String[]>(params.length);
    for (int i = 0; i < params.length; i++) {
        String key = null;
        String value = null;
        // get key and value, separated by a '='
        int pos = params[i].indexOf(PARAMETER_ASSIGNMENT);
        if (pos > 0) {
            key = params[i].substring(0, pos);
            value = params[i].substring(pos + 1);
        } else if (pos < 0) {
            // no '=' at all: parameter with empty value
            // (pos == 0, i.e. "=value", is deliberately skipped: key stays null)
            key = params[i];
            value = "";
        }
        // adjust the key if it starts with "amp;"
        // this happens when "&amp;" is used instead of a simple "&"
        if ((key != null) && (key.startsWith(AMP))) {
            key = key.substring(AMP.length());
        }
        // now make sure the values are of type String[]
        if (key != null) {
            if (decodeParameters) {
                key = CmsEncoder.decode(key, encoding);
                value = CmsEncoder.decode(value, encoding);
            }
            String[] values = parameters.get(key);
            if (values == null) {
                // this is the first value, create new array
                values = new String[] {value};
            } else {
                // append to the existing value array
                String[] copy = new String[values.length + 1];
                System.arraycopy(values, 0, copy, 0, values.length);
                copy[copy.length - 1] = value;
                values = copy;
            }
            parameters.put(key, values);
        }
    }
    return parameters;
}
public class Scheduler { /** * cal . set ( Calendar . DAY _ OF _ WEEK , dayOfWeek ) ;
* @ param dayOfWeek
* @ return time date */
public static Date toDateDayOfWeek ( int dayOfWeek ) { } } | Calendar cal = Calendar . getInstance ( ) ; cal . set ( Calendar . DAY_OF_WEEK , dayOfWeek ) ; return cal . getTime ( ) ; |
/**
 * Returns a sequential ordered {@code EntryStream} containing the possible
 * pairs of elements taken from the provided list.
 *
 * Both keys and values are taken from the input list. The index of the key is
 * always strictly less than the index of the value. The pairs in the stream
 * are lexicographically ordered. For example, for the list of three elements
 * the stream of three elements is created:
 * {@code Map.Entry(list.get(0), list.get(1))},
 * {@code Map.Entry(list.get(0), list.get(2))} and
 * {@code Map.Entry(list.get(1), list.get(2))}. The number of elements in the
 * resulting stream is {@code list.size() * (list.size() - 1L) / 2}.
 *
 * The list values are accessed using {@link List#get(int)}, so the list should
 * provide fast random access. The list is assumed to be unmodifiable during
 * the stream operations.
 *
 * @param <T> type of the list elements
 * @param list a list to take the elements from
 * @return a new {@code EntryStream}
 * @see StreamEx#ofPairs(List, BiFunction)
 * @since 0.3.6
 */
public static <T> EntryStream<T, T> ofPairs(List<T> list) {
    return of(new PairPermutationSpliterator<>(list, SimpleImmutableEntry::new));
}
public class HibernateLayer { /** * This implementation does not support the ' offset ' parameter . The maxResultSize parameter is not used ( limiting
* the result needs to be done after security { @ link org . geomajas . internal . layer . vector . GetFeaturesEachStep } ) . If
* you expect large results to be returned enable scrollableResultSet to retrieve only as many records as needed . */
public Iterator < ? > getElements ( Filter filter , int offset , int maxResultSize ) throws LayerException { } } | try { Session session = getSessionFactory ( ) . getCurrentSession ( ) ; Criteria criteria = session . createCriteria ( getFeatureInfo ( ) . getDataSourceName ( ) ) ; if ( filter != null ) { if ( filter != Filter . INCLUDE ) { CriteriaVisitor visitor = new CriteriaVisitor ( ( HibernateFeatureModel ) featureModel , dateFormat ) ; Criterion c = ( Criterion ) filter . accept ( visitor , criteria ) ; if ( c != null ) { criteria . add ( c ) ; } } } // Sorting of elements .
if ( getFeatureInfo ( ) . getSortAttributeName ( ) != null ) { if ( SortType . ASC . equals ( getFeatureInfo ( ) . getSortType ( ) ) ) { criteria . addOrder ( Order . asc ( getFeatureInfo ( ) . getSortAttributeName ( ) ) ) ; } else { criteria . addOrder ( Order . desc ( getFeatureInfo ( ) . getSortAttributeName ( ) ) ) ; } } criteria . setResultTransformer ( Criteria . DISTINCT_ROOT_ENTITY ) ; if ( isScrollableResultSet ( ) ) { return ( Iterator < ? > ) new ScrollIterator ( criteria . scroll ( ) ) ; } else { List < ? > list = criteria . list ( ) ; return list . iterator ( ) ; } } catch ( HibernateException he ) { throw new HibernateLayerException ( he , ExceptionCode . HIBERNATE_LOAD_FILTER_FAIL , getFeatureInfo ( ) . getDataSourceName ( ) , filter . toString ( ) ) ; } |
/**
 * Returns a range of all the cp definition option value rels where companyId = &#63;.
 *
 * Useful when paginating results. Returns a maximum of <code>end - start</code>
 * instances. <code>start</code> and <code>end</code> are not primary keys, they
 * are indexes in the result set; setting both to {@link QueryUtil#ALL_POS} will
 * return the full result set.
 *
 * @param companyId the company ID
 * @param start the lower bound of the range of cp definition option value rels
 * @param end the upper bound of the range of cp definition option value rels (not inclusive)
 * @return the range of matching cp definition option value rels
 */
public static List<CPDefinitionOptionValueRel> findByCompanyId(long companyId, int start, int end) {
    // thin static facade over the persistence service
    return getPersistence().findByCompanyId(companyId, start, end);
}
/**
 * Returns an iterator over the keys stored in the cache.
 *
 * @return an iterator over the cache keys
 */
public Iterator<K> keys() {
    // NOTE(review): the iterator receives this cache both via its constructor and
    // via init(); presumably init() performs setup the constructor does not —
    // confirm against KeyIterator before simplifying.
    KeyIterator<K, V> iter = new KeyIterator<K, V>(this);
    iter.init(this);
    return iter;
}
public class StoragePreferenceFragment { /** * END PERMISSION CHECK */
@ Override public void onClick ( View v ) { } } | switch ( v . getId ( ) ) { case R . id . buttonManualCacheEntry : { showManualEntry ( ) ; } break ; case R . id . buttonSetCache : { showPickCacheFromList ( ) ; } break ; } |
public class Descriptor { /** * Unlike { @ link # clazz } , return the parameter type ' T ' , which determines
* the { @ link DescriptorExtensionList } that this goes to .
* In those situations where subtypes cannot provide the type parameter ,
* this method can be overridden to provide it . */
public Class < T > getT ( ) { } } | Type subTyping = Types . getBaseClass ( getClass ( ) , Descriptor . class ) ; if ( ! ( subTyping instanceof ParameterizedType ) ) { throw new IllegalStateException ( getClass ( ) + " doesn't extend Descriptor with a type parameter." ) ; } return Types . erasure ( Types . getTypeArgument ( subTyping , 0 ) ) ; |
public class NaiveTokenizer { /** * Sets the minimum allowed token length . Any token discovered shorter than
* the minimum length will not be accepted and skipped over . The default
* is 0.
* @ param minTokenLength the minimum length for a token to be used */
public void setMinTokenLength ( int minTokenLength ) { } } | if ( minTokenLength < 0 ) throw new IllegalArgumentException ( "Minimum token length must be non negative, not " + minTokenLength ) ; if ( minTokenLength > maxTokenLength ) throw new IllegalArgumentException ( "Minimum token length can not exced the maximum token length" ) ; this . minTokenLength = minTokenLength ; |
/**
 * Orders the query results by descending spatial distance from the given WKT
 * shape, on the given field.
 *
 * @param fieldName the spatial field to order by
 * @param shapeWkt the reference shape in WKT form
 * @return this query, for chaining
 */
@Override
public IDocumentQuery<T> orderByDistanceDescending(String fieldName, String shapeWkt) {
    _orderByDistanceDescending(fieldName, shapeWkt);
    return this;
}
/**
 * Loads the generated class for the given annotated class by using the class
 * loader, caching the result per input class.
 *
 * @param cls annotated class acting also as superclass for the created parser
 * @return the generated class named by the class's {@code @GenClassname} annotation
 * @throws ClassNotFoundException if the generated class cannot be loaded
 * @throws IllegalArgumentException if {@code @GenClassname} is not present on cls
 */
public static Class<?> loadGenClass(Class<?> cls) throws ClassNotFoundException {
    Class<?> parserClass = map.get(cls);
    if (parserClass == null) {
        GenClassname genClassname = cls.getAnnotation(GenClassname.class);
        if (genClassname == null) {
            throw new IllegalArgumentException("@GenClassname not set in " + cls);
        }
        parserClass = Class.forName(genClassname.value());
        // NOTE(review): unsynchronized check-then-put on the shared map; harmless
        // duplicate loads aside, confirm `map` is a concurrent map if this is
        // called from multiple threads.
        map.put(cls, parserClass);
    }
    return parserClass;
}
/**
 * Wraps the given source in a caching source with default cache behavior.
 *
 * @param origin origin source
 * @param ident unique identity for this cached source, used in filename
 * @param descr description of the source, used in logging
 * @return new source
 */
private ResourceModelSource createCachingSource(ResourceModelSource origin, String ident, String descr) {
    // delegates with cache type BOTH and logging enabled
    return createCachingSource(origin, ident, descr, SourceFactory.CacheType.BOTH, true);
}
public class RulePluralizer { /** * Defines a word with irregular plural . Ensures that the case of
* the first letter is taken over .
* < pre > { @ code
* irregular ( " cow " , " kine " ) ;
* pluralOf ( " cow " ) ; / / - > " kine "
* pluralOf ( " Cow " ) ; / / - > " Kine "
* singularOf ( " kine " ) ; / / - > " cow "
* singularOf ( " Kine " ) ; / / - > " Cow "
* } < / pre >
* The implementation of this method relies on
* { @ code plural } and { @ code singular } .
* @ param singular
* @ param plural
* @ see # plural ( org . cthul . strings . plural . RulePluralizer . Rule )
* @ see # singular ( org . cthul . strings . plural . RulePluralizer . Rule ) */
protected void irregular ( String singular , String plural ) { } } | plural = lower ( plural ) ; singular = lower ( singular ) ; plural ( newIrregularRule ( singular , plural , locale ) ) ; singular ( newIrregularRule ( plural , singular , locale ) ) ; |
/**
 * Returns the next call event that is safe for delivery, or null if there are
 * no safe objects to deliver. A null response could mean empty, or could mean
 * all objects are scheduled for the future.
 *
 * @param systemCurrentTimeMillis the current time
 * @return the next CallEvent, or null
 */
@Override
public CallEvent next(long systemCurrentTimeMillis) {
    // check for time passing
    if (systemCurrentTimeMillis > currentSystemMilliTimestamp) {
        // build a target for this 1ms window: carry over any unmet backlog and
        // probabilistically round the fractional per-ms rate
        long eventBacklog = targetEventsThisMillisecond - eventsSoFarThisMillisecond;
        targetEventsThisMillisecond = (long) Math.floor(targetEventsPerMillisecond);
        double targetFraction = targetEventsPerMillisecond - targetEventsThisMillisecond;
        targetEventsThisMillisecond += (rand.nextDouble() <= targetFraction) ? 1 : 0;
        targetEventsThisMillisecond += eventBacklog;
        // reset counter for this 1ms window
        eventsSoFarThisMillisecond = 0;
        currentSystemMilliTimestamp = systemCurrentTimeMillis;
    }
    // drain scheduled events first
    CallEvent callEvent = delayedEvents.nextReady(systemCurrentTimeMillis);
    if (callEvent != null) {
        // double check this is an end event
        assert (callEvent.startTS == null);
        assert (callEvent.endTS != null);
        // return the agent/phone for this event to the available lists
        agentsAvailable.add(callEvent.agentId);
        phoneNumbersAvailable.add(callEvent.phoneNo);
        validate();
        return callEvent;
    }
    // check if we made all the target events for this 1ms window
    if (targetEventsThisMillisecond == eventsSoFarThisMillisecond) {
        validate();
        return null;
    }
    // generate random event (begin/end pair)
    CallEvent[] event = makeRandomEvent();
    // this means all agents are busy
    if (event == null) {
        validate();
        return null;
    }
    // schedule the end event; the begin event is returned immediately
    long endTimeKey = event[1].endTS.getTime();
    assert ((endTimeKey - systemCurrentTimeMillis) < (config.maxcalldurationseconds * 1000));
    delayedEvents.add(endTimeKey, event[1]);
    eventsSoFarThisMillisecond++;
    validate();
    return event[0];
}
/**
 * (non-Javadoc)
 *
 * @see java.util.List#remove(int)
 *
 * Removes and returns the event at the given index from the buffered backing
 * store.
 */
public XEvent remove(int index) {
    try {
        XEvent result = events.remove(index);
        return result;
    } catch (IOException e) {
        // NOTE(review): the I/O failure is swallowed and null is returned to keep
        // the List contract (which allows no checked exception); callers cannot
        // distinguish "removed null" from "storage error" — confirm this is intended.
        e.printStackTrace();
        return null;
    }
}
/**
 * Shuts down this registry and the associated {@link MetricReporter}.
 *
 * <p>NOTE: This operation is asynchronous and returns a future which is
 * completed once the shutdown operation has been completed.
 *
 * @return future which is completed once the {@link MetricRegistryImpl} is shut down
 */
public CompletableFuture<Void> shutdown() {
    synchronized (lock) {
        if (isShutdown) {
            // idempotent: later calls get the same termination future
            return terminationFuture;
        } else {
            isShutdown = true;
            final Collection<CompletableFuture<Void>> terminationFutures = new ArrayList<>(3);
            final Time gracePeriod = Time.seconds(1L);
            if (metricQueryServiceRpcService != null) {
                final CompletableFuture<Void> metricQueryServiceRpcServiceTerminationFuture =
                        metricQueryServiceRpcService.stopService();
                terminationFutures.add(metricQueryServiceRpcServiceTerminationFuture);
            }
            // close every reporter, collecting failures instead of aborting early
            Throwable throwable = null;
            for (MetricReporter reporter : reporters) {
                try {
                    reporter.close();
                } catch (Throwable t) {
                    throwable = ExceptionUtils.firstOrSuppressed(t, throwable);
                }
            }
            reporters.clear();
            if (throwable != null) {
                terminationFutures.add(FutureUtils.completedExceptionally(
                        new FlinkException("Could not shut down the metric reporters properly.", throwable)));
            }
            final CompletableFuture<Void> executorShutdownFuture =
                    ExecutorUtils.nonBlockingShutdown(
                            gracePeriod.toMilliseconds(), TimeUnit.MILLISECONDS, executor);
            terminationFutures.add(executorShutdownFuture);
            // complete terminationFuture only when every component has finished
            FutureUtils.completeAll(terminationFutures)
                    .whenComplete((Void ignored, Throwable error) -> {
                        if (error != null) {
                            terminationFuture.completeExceptionally(error);
                        } else {
                            terminationFuture.complete(null);
                        }
                    });
            return terminationFuture;
        }
    }
}
/**
 * <p>Return an average throughput of transactions acknowledged per second for
 * the duration covered by this stats instance.</p>
 *
 * <p>Essentially {@link #getInvocationsCompleted()} divided by the covered
 * duration in seconds, but with additional safety checks.</p>
 *
 * @return throughput in transactions acknowledged per second
 */
public long getTxnThroughput() {
    assert (m_startTS != Long.MAX_VALUE);
    assert (m_endTS != Long.MIN_VALUE);
    if (m_invocationsCompleted == 0) return 0;
    // NOTE(review): this getter mutates m_endTS to avoid a zero/negative duration;
    // 1 ms duration is sorta cheatin'
    if (m_endTS < m_startTS) {
        m_endTS = m_startTS + 1;
    }
    long durationMs = m_endTS - m_startTS;
    return (long) (m_invocationsCompleted / (durationMs / 1000.0));
}
/**
 * Validates a Screened Data image IFD against the TIFF/IT profile rules.
 *
 * @param ifd the ifd
 * @param p the profile (default = 0, P2 = 2); P2 imposes additional/stricter tag constraints
 */
private void validateIfdSD(IFD ifd, int p) {
    IfdTags metadata = ifd.getMetadata();
    // P2 additionally pins NewSubfileType to 0
    if (p == 2) {
        checkRequiredTag(metadata, "NewSubfileType", 1, new long[]{0});
    }
    checkRequiredTag(metadata, "ImageLength", 1);
    checkRequiredTag(metadata, "ImageWidth", 1);
    checkRequiredTag(metadata, "BitsPerSample", 1, new long[]{1});
    checkRequiredTag(metadata, "Compression", 1, new long[]{1, 4, 8});
    checkRequiredTag(metadata, "PhotometricInterpretation", 1, new long[]{5});
    checkRequiredTag(metadata, "StripOffsets", 1);
    // the default profile allows more orientations than P2
    if (p == 0) {
        checkRequiredTag(metadata, "Orientation", 1, new long[]{1, 4, 5, 8});
    } else {
        checkRequiredTag(metadata, "Orientation", 1, new long[]{1});
    }
    if (p == 2) {
        checkRequiredTag(metadata, "SamplesPerPixel", 1, new long[]{1, 4});
    }
    checkRequiredTag(metadata, "StripBYTECount", 1);
    checkRequiredTag(metadata, "XResolution", 1);
    checkRequiredTag(metadata, "YResolution", 1);
    checkRequiredTag(metadata, "PlanarConfiguration", 1, new long[]{2});
    if (p == 2) {
        checkRequiredTag(metadata, "ResolutionUnit", 1, new long[]{2, 3});
        checkRequiredTag(metadata, "NumberOfInks", 1, new long[]{4});
    }
    checkRequiredTag(metadata, "InkSet", 1, new long[]{1});
    checkRequiredTag(metadata, "BackgroundColorIndicator", 1, new long[]{0, 1, 2});
}
public class GetClassifiersResult { /** * The requested list of classifier objects .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setClassifiers ( java . util . Collection ) } or { @ link # withClassifiers ( java . util . Collection ) } if you want to
* override the existing values .
* @ param classifiers
* The requested list of classifier objects .
* @ return Returns a reference to this object so that method calls can be chained together . */
public GetClassifiersResult withClassifiers ( Classifier ... classifiers ) { } } | if ( this . classifiers == null ) { setClassifiers ( new java . util . ArrayList < Classifier > ( classifiers . length ) ) ; } for ( Classifier ele : classifiers ) { this . classifiers . add ( ele ) ; } return this ; |
public class CollUtil { /** * 新建一个List < br >
* 提供的参数为null时返回空 { @ link ArrayList }
* @ param < T > 集合元素类型
* @ param isLinked 是否新建LinkedList
* @ param enumration { @ link Enumeration }
* @ return ArrayList对象
* @ since 3.0.8 */
public static < T > List < T > list ( boolean isLinked , Enumeration < T > enumration ) { } } | final List < T > list = list ( isLinked ) ; if ( null != enumration ) { while ( enumration . hasMoreElements ( ) ) { list . add ( enumration . nextElement ( ) ) ; } } return list ; |
public class InvalidationAuditDaemon { /** * This notifies this daemon that a specified cache instance has cleared . It
* clears the internal tables
* @ param cache The cache instance . */
public void cacheCleared ( String cacheName ) { } } | InvalidationTableList list = ( InvalidationTableList ) cacheinvalidationTables . get ( cacheName ) ; if ( list != null ) list . clear ( ) ; |
public class HandoffResultsListener { /** * Builds a runnable to shut down a work unit after a configurable delay once handoff
* has completed . If the cluster has been instructed to shut down and the last work unit
* has been handed off , this task also directs this Ordasity instance to shut down . */
private Runnable shutdownAfterHandoff ( final String workUnit ) { } } | final Cluster cluster = this . cluster ; final Logger log = LOG ; return new Runnable ( ) { @ Override public void run ( ) { String str = cluster . getHandoffResult ( workUnit ) ; log . info ( "Shutting down {} following handoff to {}." , workUnit , ( str == null ) ? "(None)" : str ) ; cluster . shutdownWork ( workUnit , /* doLog = */
false ) ; if ( cluster . hasState ( NodeState . Draining ) && cluster . myWorkUnits . isEmpty ( ) ) { cluster . shutdown ( ) ; } } } ; |
public class StringUtils {
    /**
     * Joins a collection of strings together into one, separating consecutive
     * chunks with the given delimiter.
     *
     * @param chunks    the chunks to join
     * @param delimiter the delimiter between the chunks
     * @return the fully joined string; empty when {@code chunks} is empty
     */
    public static String join(final Collection<String> chunks, final String delimiter) {
        // String.join performs exactly the manual StringBuilder loop this
        // method used to contain, including returning "" for an empty input.
        return String.join(delimiter, chunks);
    }
}
public class Decoder {
    /**
     * <p>Performs RS error correction on an array of bits.</p>
     *
     * @param rawbits the raw bit stream read from the symbol
     * @return the corrected array
     * @throws FormatException if the input contains too many errors
     */
    private boolean[] correctBits(boolean[] rawbits) throws FormatException {
        GenericGF gf;
        int codewordSize;
        // The codeword size (and hence the Galois field used by Reed-Solomon)
        // is determined by the number of layers in the symbol.
        if (ddata.getNbLayers() <= 2) {
            codewordSize = 6;
            gf = GenericGF.AZTEC_DATA_6;
        } else if (ddata.getNbLayers() <= 8) {
            codewordSize = 8;
            gf = GenericGF.AZTEC_DATA_8;
        } else if (ddata.getNbLayers() <= 22) {
            codewordSize = 10;
            gf = GenericGF.AZTEC_DATA_10;
        } else {
            codewordSize = 12;
            gf = GenericGF.AZTEC_DATA_12;
        }
        int numDataCodewords = ddata.getNbDatablocks();
        int numCodewords = rawbits.length / codewordSize;
        if (numCodewords < numDataCodewords) {
            throw FormatException.getFormatInstance();
        }
        // Leading bits that do not fill a whole codeword are skipped as padding.
        int offset = rawbits.length % codewordSize;
        int[] dataWords = new int[numCodewords];
        for (int i = 0; i < numCodewords; i++, offset += codewordSize) {
            dataWords[i] = readCode(rawbits, offset, codewordSize);
        }
        try {
            ReedSolomonDecoder rsDecoder = new ReedSolomonDecoder(gf);
            rsDecoder.decode(dataWords, numCodewords - numDataCodewords);
        } catch (ReedSolomonException ex) {
            throw FormatException.getFormatInstance(ex);
        }
        // Now perform the unstuffing operation.
        // First, count how many bits are going to be thrown out as stuffing
        int mask = (1 << codewordSize) - 1;
        int stuffedBits = 0;
        for (int i = 0; i < numDataCodewords; i++) {
            int dataWord = dataWords[i];
            if (dataWord == 0 || dataWord == mask) {
                // All-zero and all-one codewords are illegal after stuffing.
                throw FormatException.getFormatInstance();
            } else if (dataWord == 1 || dataWord == mask - 1) {
                // A stuffed codeword contributes one fewer data bit.
                stuffedBits++;
            }
        }
        // Now, actually unpack the bits and remove the stuffing
        boolean[] correctedBits = new boolean[numDataCodewords * codewordSize - stuffedBits];
        int index = 0;
        for (int i = 0; i < numDataCodewords; i++) {
            int dataWord = dataWords[i];
            if (dataWord == 1 || dataWord == mask - 1) {
                // next codewordSize - 1 bits are all zeros or all ones
                Arrays.fill(correctedBits, index, index + codewordSize - 1, dataWord > 1);
                index += codewordSize - 1;
            } else {
                // Unpack the codeword most-significant bit first.
                for (int bit = codewordSize - 1; bit >= 0; --bit) {
                    correctedBits[index++] = (dataWord & (1 << bit)) != 0;
                }
            }
        }
        return correctedBits;
    }
}
public class ExpandableFieldDeserializer { /** * Deserializes an expandable field JSON payload ( i . e . either a string with just the ID , or a full
* JSON object ) into an { @ link ExpandableField } object . */
@ Override public ExpandableField < ? > deserialize ( JsonElement json , Type typeOfT , JsonDeserializationContext context ) throws JsonParseException { } } | if ( json . isJsonNull ( ) ) { return null ; } ExpandableField < ? > expandableField ; // Check if json is a String ID . If so , the field has not been expanded , so we only need to
// serialize a String and create a new ExpandableField with the String id only .
if ( json . isJsonPrimitive ( ) ) { JsonPrimitive jsonPrimitive = json . getAsJsonPrimitive ( ) ; if ( jsonPrimitive . isString ( ) ) { expandableField = new ExpandableField < > ( jsonPrimitive . getAsString ( ) , null ) ; return expandableField ; } else { throw new JsonParseException ( "ExpandableField is a non-string primitive type." ) ; } // Check if json is an expanded Object . If so , the field has been expanded , so we need to
// serialize it into the proper typeOfT , and create an ExpandableField with both the String id
// and this serialized object .
} else if ( json . isJsonObject ( ) ) { // Get the ` id ` out of the response
JsonObject fieldAsJsonObject = json . getAsJsonObject ( ) ; String id = fieldAsJsonObject . getAsJsonPrimitive ( "id" ) . getAsString ( ) ; // We need to get the type inside the generic ExpandableField to make sure fromJson correctly
// serializes the JsonObject :
Type clazz = ( ( ParameterizedType ) typeOfT ) . getActualTypeArguments ( ) [ 0 ] ; expandableField = new ExpandableField < > ( id , ( HasId ) context . deserialize ( json , clazz ) ) ; return expandableField ; } // If json is neither a String nor an Object , error . ( We expect all expandable objects to fit
// the known string - or - object design . If one doesn ' t , then something may have changed in the API
// and this code may need to be updated . )
throw new JsonParseException ( "ExpandableField is a non-object, non-primitive type." ) ; |
public class SessionManagerActor {
    /**
     * Spawn new session.
     *
     * @param uid           user's id
     * @param ownKeyGroup   own key group id
     * @param theirKeyGroup their key group id
     * @param ownIdentity   own identity private key
     * @param theirIdentity their identity public key
     * @param ownPreKey     own pre key
     * @param theirPreKey   their pre key
     * @return spawned session
     */
    private PeerSession spawnSession(int uid, int ownKeyGroup, int theirKeyGroup, PrivateKey ownIdentity,
            PublicKey theirIdentity, PrivateKey ownPreKey, PublicKey theirPreKey) {
        // Calculating Master Secret from our two private keys and their two public keys.
        // NOTE(review): the argument order (own identity, own pre key, their identity,
        // their pre key) must match what RatchetMasterSecret expects — do not reorder.
        byte[] masterSecret = RatchetMasterSecret.calculateMasterSecret(
                new RatchetPrivateKey(ownIdentity.getKey()),
                new RatchetPrivateKey(ownPreKey.getKey()),
                new RatchetPublicKey(theirIdentity.getPublicKey()),
                new RatchetPublicKey(theirPreKey.getPublicKey()));
        // Building Session with a fresh random id, recording both pre-key ids.
        PeerSession peerSession = new PeerSession(RandomUtils.nextRid(), uid, ownKeyGroup, theirKeyGroup,
                ownPreKey.getKeyId(), theirPreKey.getKeyId(), masterSecret);
        // Saving session in the per-user sessions storage; the storage is created on
        // first use, and addSession returns the updated storage instance.
        PeerSessionsStorage sessionsStorage = peerSessions.getValue(uid);
        if (sessionsStorage == null) {
            sessionsStorage = new PeerSessionsStorage(uid, new ArrayList<PeerSession>());
        }
        sessionsStorage = sessionsStorage.addSession(peerSession);
        peerSessions.addOrUpdateItem(sessionsStorage);
        return peerSession;
    }
}
public class JMSDestinationDefinitionInjectionBinding {
    /**
     * Merges the values of a {@code @JMSDestinationDefinition} annotation into this binding.
     *
     * @see com.ibm.wsspi.injectionengine.InjectionBinding#merge(java.lang.annotation.Annotation, java.lang.Class, java.lang.reflect.Member)
     */
    @Override
    public void merge(JMSDestinationDefinition annotation, Class<?> instanceClass, Member member) throws InjectionException {
        if (member != null) {
            // JMSDestinationDefinition is a class-level annotation only.
            throw new IllegalArgumentException(member.toString());
        }
        // Merge each annotation attribute against the current value; the isXml*Set
        // flags record which values were explicitly supplied by XML (presumably so
        // mergeAnnotationValue can give them precedence — confirm in its contract).
        name = mergeAnnotationValue(name, isXmlNameSet, annotation.name(), JMSDestinationProperties.NAME.getAnnotationKey(), "");
        interfaceName = mergeAnnotationValue(interfaceName, isXmlInterfaceNameSet, annotation.interfaceName(), JMSDestinationProperties.INTERFACE_NAME.getAnnotationKey(), "");
        className = mergeAnnotationValue(className, isXmlclassNameSet, annotation.className(), JMSDestinationProperties.CLASS_NAME.getAnnotationKey(), "");
        description = mergeAnnotationValue(description, isXmlDescriptionSet, annotation.description(), JMSDestinationProperties.DESCRIPTION.getAnnotationKey(), "");
        destinationName = mergeAnnotationValue(destinationName, isXmlDestinationNameSet, annotation.destinationName(), JMSDestinationProperties.DESTINATION_NAME.getAnnotationKey(), "");
        resourceAdapter = mergeAnnotationValue(resourceAdapter, isXmlResourceAdapterSet, annotation.resourceAdapter(), JMSDestinationProperties.RESOURCE_ADAPTER.getAnnotationKey(), "");
        properties = mergeAnnotationProperties(properties, xmlProperties, annotation.properties());
    }
}
public class JDBCUtils {
    /**
     * Runs a parameterized query and returns the raw ResultSet. Because the
     * caller must manually close the underlying resources, this method is not
     * recommended for use.
     *
     * @param sql  the SQL statement, with '?' placeholders for parameters
     * @param args positional values bound to the placeholders (may be null)
     * @return the ResultSet, or null when the query fails
     */
    @Deprecated
    public static ResultSet query(String sql, Object... args) {
        ResultSet result = null;
        Connection con = getconnnection();
        PreparedStatement ps = null;
        try {
            ps = con.prepareStatement(sql);
            if (args != null) {
                // JDBC parameter indices are 1-based.
                for (int i = 0; i < args.length; i++) {
                    ps.setObject((i + 1), args[i]);
                }
            }
            result = ps.executeQuery();
        } catch (SQLException e) {
            // NOTE(review): the exception is only printed and null is returned; the
            // Connection and PreparedStatement are never closed here (they must stay
            // open for the returned ResultSet) — this leak is why the method is deprecated.
            e.printStackTrace();
        }
        return result;
    }
}
public class SecurityServiceImpl { /** * Retrieve the AuthorizationService for the specified id .
* @ param id AuthorizationService id to retrieve
* @ return A non - null AuthorizationService instance . */
private AuthorizationService getAuthorizationService ( String id ) { } } | AuthorizationService service = authorization . getService ( id ) ; if ( service == null ) { throwIllegalArgumentExceptionInvalidAttributeValue ( SecurityConfiguration . CFG_KEY_AUTHORIZATION_REF , id ) ; } return service ; |
public class XmlUtils { /** * Prints the XML { @ link Document } to standard out with handy indentation .
* @ param doc The { @ link Document } to print . */
public static void printDoc ( final Document doc ) { } } | try { final Transformer trans = TransformerFactory . newInstance ( ) . newTransformer ( ) ; trans . setOutputProperty ( OutputKeys . INDENT , "yes" ) ; trans . transform ( new DOMSource ( doc ) , new StreamResult ( System . out ) ) ; } catch ( final TransformerConfigurationException e ) { LOG . error ( "Could not configure transformer" , e ) ; } catch ( final TransformerFactoryConfigurationError e ) { LOG . error ( "Could not configure transformer factory" , e ) ; } catch ( final TransformerException e ) { LOG . error ( "Error running transformation" , e ) ; } |
public class WAjaxControl { /** * Get the target WComponents that will be repainted as a consequence of the AJAX request .
* When the AJAX request is triggered only the target component ( s ) will be re - painted . An empty list is returned if
* no targets have been defined .
* @ return the target regions that are repainted in the AJAX response */
public List < AjaxTarget > getTargets ( ) { } } | List < AjaxTarget > targets = getComponentModel ( ) . targets ; if ( targets == null ) { return Collections . emptyList ( ) ; } return Collections . unmodifiableList ( targets ) ; |
public class BackupResourceVaultConfigsInner { /** * Fetches resource vault config .
* @ param vaultName The name of the recovery services vault .
* @ param resourceGroupName The name of the resource group where the recovery services vault is present .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the BackupResourceVaultConfigResourceInner object */
public Observable < BackupResourceVaultConfigResourceInner > getAsync ( String vaultName , String resourceGroupName ) { } } | return getWithServiceResponseAsync ( vaultName , resourceGroupName ) . map ( new Func1 < ServiceResponse < BackupResourceVaultConfigResourceInner > , BackupResourceVaultConfigResourceInner > ( ) { @ Override public BackupResourceVaultConfigResourceInner call ( ServiceResponse < BackupResourceVaultConfigResourceInner > response ) { return response . body ( ) ; } } ) ; |
public class SubsystemFeatureDefinitionImpl {
    /**
     * Determines whether this feature's capability filters are all satisfied by the
     * supplied feature definitions. Non-auto features are trivially satisfied.
     *
     * @see com.ibm.ws.kernel.feature.FeatureDefinition#isCapabilitySatified(java.util.Collection)
     */
    @Override
    public boolean isCapabilitySatisfied(Collection<ProvisioningFeatureDefinition> featureDefinitionsToCheck) {
        // If it isn't an autofeature, it's satisfied.
        if (!iAttr.isAutoFeature)
            return true;
        if (mfDetails == null)
            throw new IllegalStateException("Method called outside of provisioining operation or without a registered service");
        boolean isCapabilitySatisfied = true;
        Iterator<Filter> iter = mfDetails.getCapabilityFilters().iterator();
        // Feature definitions already used to satisfy a filter; each may be used at most once.
        Set<ProvisioningFeatureDefinition> satisfiedFeatureDefs = new HashSet<ProvisioningFeatureDefinition>();
        // Now we need to iterate over each of the filters, until we find we don't have a match.
        while (iter.hasNext() && isCapabilitySatisfied) {
            Filter checkFilter = iter.next();
            Iterator<ProvisioningFeatureDefinition> featureDefIter = featureDefinitionsToCheck.iterator();
            // Now for each filter, iterate over each of the FeatureDefinition headers,
            // checking to see if we have a match.
            boolean featureMatch = false;
            while (featureDefIter.hasNext() && !featureMatch) {
                ProvisioningFeatureDefinition featureDef = featureDefIter.next();
                // If we've already satisfied a capability with this FeatureDefinition,
                // we don't need to use it again.
                if (!satisfiedFeatureDefs.contains(featureDef)) {
                    // We have a mismatch between the key the filter is using to look up the
                    // feature name and the property containing the name in the headers. So we
                    // need to add a new property for osgi.identity (filter key) that contains
                    // the value of the Subsystem-SymbolicName (manifest header). We also have
                    // to do this for the Subsystem-Type (manifest header) and the type (filter key).
                    Map<String, String> filterProps = new HashMap<String, String>();
                    filterProps.put(FeatureDefinitionUtils.FILTER_FEATURE_KEY, featureDef.getSymbolicName());
                    try {
                        filterProps.put(FeatureDefinitionUtils.FILTER_TYPE_KEY, mfDetails.getMainAttributeValue(FeatureDefinitionUtils.TYPE));
                    } catch (IOException e) {
                        // We should be well beyond any IOException issues..
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                            Tr.debug(tc, "IOException reading manifest attribute from {0}: {1}", iAttr.featureFile, e);
                        }
                        continue;
                    }
                    if (checkFilter.matches(filterProps)) {
                        satisfiedFeatureDefs.add(featureDef);
                        featureMatch = true;
                    }
                }
            }
            // Once we've checked all the FeatureDefinitions, apply the result to the
            // isCapabilitySatisfied boolean, so we stop processing as soon as we know
            // we don't have a match.
            isCapabilitySatisfied = featureMatch;
        }
        return isCapabilitySatisfied;
    }
}
public class ChannelManager { /** * Removes a channel with its given identification . */
public void removeChannel ( T channel , String functionalityName , String requesterID , String conversationID ) { } } | synchronized ( this . channelsWithId ) { this . channelsWithId . remove ( calculateId ( functionalityName , requesterID , conversationID ) ) ; } synchronized ( this . channelsWithFunctionalityName ) { this . channelsWithFunctionalityName . get ( functionalityName ) . remove ( channel ) ; } |
public class AfplibPackageImpl {
    /**
     * Lazily resolves and returns the {@link EEnum} for MDD MDDFlgs from the
     * registered AFP library package (classifier index 46).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getMDDMDDFlgs() {
        if (mddmddFlgsEEnum == null) {
            // Resolve once from the global EMF package registry; index 46 is the
            // generated position of this enum in the package's classifier list.
            mddmddFlgsEEnum = (EEnum) EPackage.Registry.INSTANCE
                    .getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(46);
        }
        return mddmddFlgsEEnum;
    }
}
public class Template { /** * Check to see if the given name is a valid template name . Valid template
* names may include only letters , digits , underscores , hyphens , periods ,
* pluses , and slashes . In addition , each term when split by slashes must
* not be empty and must not start with a period . The second case excludes
* potential hidden files and special names like " . " and " . . " .
* @ param name
* template name to check for validity
* @ return boolean value indicating if the given name is a valid template
* name */
static public boolean isValidTemplateName ( String name ) { } } | // First do the easy check to make sure that the string isn ' t empty and
// contains only valid characters .
if ( ! validTemplateNameChars . matcher ( name ) . matches ( ) ) { return false ; } // Split the string on slashes and ensure that each one of the terms is
// valid . Cannot be empty or start with a period . ( Above check already
// guarantees that only allowed characters are in the string . )
for ( String t : name . split ( "/" ) ) { if ( "" . equals ( t ) || t . startsWith ( "." ) ) { return false ; } } // If we make it to this point , then the string has passed all of the
// checks and is valid .
return true ; |
public class ManagedComponent { /** * Emits a notification through this manageable . */
@ Override public void emit ( Level level , String message , long sequence ) { } } | if ( _broadcaster != null ) _broadcaster . sendNotification ( new Notification ( level . toString ( ) , _name != null ? _name : this , sequence , message ) ) ; |
public class Job { /** * Execution steps for an execution job , for an Amplify App .
* @ param steps
* Execution steps for an execution job , for an Amplify App . */
public void setSteps ( java . util . Collection < Step > steps ) { } } | if ( steps == null ) { this . steps = null ; return ; } this . steps = new java . util . ArrayList < Step > ( steps ) ; |
public class GenericTransportReceiveListener {
    /**
     * Notification that an error occurred when we were expecting to receive a
     * response. This method is used to "wake up" any conversations using a
     * connection for which an error occurred. At the point this method is
     * invoked, the connection will already have been marked "invalid". It is
     * used to notify the per conversation receive listener of (almost) all
     * error conditions encountered on the associated connection.
     *
     * @see ConversationReceiveListener
     * @param exception     the exception which occurred
     * @param segmentType   the segment type of the data (-1 if not known)
     * @param requestNumber the request number associated with the failing request (-1 if not known)
     * @param priority      the priority associated with the failing request (-1 if not known)
     * @param conversation  the conversation (null if not known)
     */
    public void errorOccurred(SIConnectionLostException exception, // F174602
                              int segmentType, int requestNumber, int priority, Conversation conversation) {
        if (tc.isEntryEnabled())
            SibTr.entry(this, tc, "errorOccurred", new Object[] { exception, segmentType, requestNumber, priority, conversation });
        // Record a first-failure data capture entry for this connection error.
        FFDCFilter.processException(exception, CLASS_NAME + ".errorOccurred", CommsConstants.GENERICTRANSPORTRECEIVELISTENER_ERROR_01, this);
        if (tc.isDebugEnabled()) {
            Object[] debug = {
                "Segment type : " + segmentType + " (0x" + Integer.toHexString(segmentType) + ")",
                "Request number: " + requestNumber,
                "Priority : " + priority
            };
            SibTr.debug(tc, "Received an error in the GenericTransportReceiveListener", debug);
            SibTr.debug(tc, "Primary exception:");
            SibTr.exception(tc, exception);
        }
        if (tc.isEntryEnabled())
            SibTr.exit(tc, "errorOccurred");
    }
}
public class FactoryMultiView { /** * Triangulate two view using the Discrete Linear Transform ( DLT ) with an uncalibrated camera .
* @ see TriangulateProjectiveLinearDLT
* @ return Two view triangulation algorithm */
public static Triangulate2ViewsProjective triangulate2ViewProjective ( @ Nullable ConfigTriangulation config ) { } } | if ( config == null ) config = new ConfigTriangulation ( ) ; switch ( config . type ) { case DLT : return new Wrap2ViewsTriangulateProjectiveDLT ( ) ; } throw new IllegalArgumentException ( "Unknown or unsupported type " + config . type ) ; |
public class DistributedLogServerContext {
    /**
     * Compacts the log by size: when the journal exceeds {@code maxLogSize},
     * walks the segments oldest-first to find the newest segment whose trailing
     * segments still exceed the limit, then compacts up to the index of the
     * first segment that must be kept.
     */
    private void compactBySize() {
        if (maxLogSize > 0 && journal.size() > maxLogSize) {
            JournalSegment<LogEntry> compactSegment = null;
            Long compactIndex = null;
            for (JournalSegment<LogEntry> segment : journal.segments()) {
                // Total size of all segments after this one.
                Collection<JournalSegment<LogEntry>> remainingSegments = journal.segments(segment.lastIndex() + 1);
                long remainingSize = remainingSegments.stream().mapToLong(JournalSegment::size).sum();
                if (remainingSize > maxLogSize) {
                    // Still over the limit without this segment: it is compactable.
                    log.debug("Found outsize journal segment {}", segment.file().file());
                    compactSegment = segment;
                } else if (compactSegment != null) {
                    // First segment that must be retained; compact up to its index.
                    compactIndex = segment.index();
                    break;
                }
            }
            if (compactIndex != null) {
                log.info("Compacting journal by size up to {}", compactIndex);
                journal.compact(compactIndex);
            }
        }
    }
}
public class ValueFactory {
    /**
     * Handles the case where the target field type is an array: converts each
     * element of the origin array into the array's component type.
     *
     * @author acexy@thankjava.com
     * @date 2015-1-27 4:30:29 PM
     * @version 1.0
     * @param targetField     the field being populated
     * @param targetFieldType the declared (array) type of the target field
     * @param targetObject    the object owning the target field
     * @param originValue     the source value (expected to be an Object[])
     * @return the converted array, or null when the origin value is null or empty
     */
    static Object createValueArray(Field targetField, Class<?> targetFieldType, Object targetObject, Object originValue) {
        if (originValue == null) {
            return null;
        }
        // Component type actually held by the target array.
        Class<?> proxyType = targetFieldType.getComponentType();
        Object[] originArray = (Object[]) originValue;
        if (originArray.length == 0) {
            // NOTE(review): an empty source array yields null rather than an empty
            // target array — confirm callers expect this.
            return null;
        }
        Object[] targetArray = (Object[]) Array.newInstance(proxyType, originArray.length);
        for (int i = 0; i < originArray.length; i++) {
            // Convert each element to the component type via the core cast routine.
            targetArray[i] = ValueCast.createValueCore(targetField, proxyType, targetObject, originArray[i]);
        }
        return targetArray;
    }
}
public class UrlBuilder {
    /**
     * Creates a new URL relative to the base URL provided in the constructor of this class. The new
     * relative URL includes the path, query parameters and the internal reference of the
     * {@link UrlBuilder#baseUrl base URL} provided with this class. An additional fragment, as well
     * as additional query parameters, can optionally be added to the new URL. In addition to the
     * parameters passed to the method as an argument, the supplied fragment can also include
     * parameters that will be added to the created URL. The created URL is normalized and unencoded
     * before returning it to the caller. The current implementation has the following limitations:
     * <ul>
     * <li>Arrays are not supported: <tt>q=foo&amp;q=bar</tt> will produce an error.</li>
     * <li>Internal references are only supported in the base URL. Any additional reference provided
     * with the fragment will be silently ignored: the fragment <tt>/rd#ref</tt> will be appended to
     * the base URL as <tt>/rd</tt>, ignoring the internal reference.</li>
     * </ul>
     *
     * @param fragment optional URL fragment (may include parameters, but not references) that will
     *        be added to the base URL
     * @param params optional query parameters that will be added to the base URL
     * @return a relative URL created from the base URL provided in the constructor of this class,
     *         adding the fragment and parameters passed as arguments to this method
     */
    public String buildRelativeUrl(final @Nullable String fragment, final @Nullable Map<String, String> params) {
        String url = null;
        final Optional<String> fragment2 = ofNullable(trimToNull(fragment));
        try {
            // Parse the fragment against a dummy authority so its path and query can be extracted.
            final Optional<URL> fragmentUrl = ofNullable(fragment2.isPresent() ? new URL("http://example.com/" + fragment2.get()) : null);
            final URIBuilder uriBuilder = new URIBuilder();
            // add path: base path plus fragment path, collapsing runs of slashes
            uriBuilder.setPath(new StringBuilder(ofNullable(trimToNull(baseUrl.getPath())).orElse("/"))
                    .append(fragmentUrl.isPresent() ? "/" + stripEnd(fragmentUrl.get().getPath(), "/") : "")
                    .toString().replaceAll("[/]{2,}", "/"));
            // add query parameters: base URL's first, then the fragment's, then the explicit ones
            if (isNotBlank(baseUrl.getQuery())) {
                uriBuilder.setParameters(URLEncodedUtils.parse(baseUrl.getQuery(), defaultCharset()));
            }
            if (fragmentUrl.isPresent() && isNotBlank(fragmentUrl.get().getQuery())) {
                URLEncodedUtils.parse(fragmentUrl.get().getQuery(), defaultCharset()).stream().forEach(p -> {
                    uriBuilder.addParameter(p.getName(), p.getValue());
                });
            }
            ofNullable(params).orElse(emptyMap()).entrySet().stream().forEach(p -> {
                uriBuilder.addParameter(p.getKey(), p.getValue());
            });
            // add internal reference (only the base URL's reference is honored)
            uriBuilder.setFragment(baseUrl.getRef());
            // build relative URL
            url = uriBuilder.build().normalize().toString();
        } catch (MalformedURLException | URISyntaxException e) {
            throw new IllegalStateException(new StringBuilder("Failed to create relative URL from provided parameters: fragment=")
                    .append(fragment2.orElse("null")).append(", params=").append(params != null ? params.toString() : "null").toString(), e);
        }
        return url;
    }
}
public class CPDefinitionPersistenceImpl { /** * Removes all the cp definitions where uuid = & # 63 ; and companyId = & # 63 ; from the database .
* @ param uuid the uuid
* @ param companyId the company ID */
@ Override public void removeByUuid_C ( String uuid , long companyId ) { } } | for ( CPDefinition cpDefinition : findByUuid_C ( uuid , companyId , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ) { remove ( cpDefinition ) ; } |
public class AbstractTypeVisitor6 {
    /**
     * {@inheritDoc}
     *
     * @implSpec Visits an {@code IntersectionType} element by calling
     *           {@code visitUnknown}.
     * @param t {@inheritDoc}
     * @param p {@inheritDoc}
     * @return the result of {@code visitUnknown}
     * @since 1.8
     */
    @Override
    public R visitIntersection(IntersectionType t, P p) {
        // Intersection types post-date this visitor's source version, so they
        // are routed through the unknown-type hook.
        return visitUnknown(t, p);
    }
}
public class CobolComplexTypeFinder { /** * { @ inheritDoc } */
public boolean match ( byte [ ] hostData , int start , int length ) { } } | Cob2ObjectValidator visitor = new Cob2ObjectValidator ( cobolContext , hostData , start , length , stopFieldInclusive ) ; visitor . visit ( cobolComplexType ) ; return visitor . isValid ( ) ; |
public class JoinPoint { /** * Shortcut method to create a JoinPoint waiting for the given synchronization points , start the JoinPoint ,
* and add the given listener to be called when the JoinPoint is unblocked .
* If any synchronization point has an error or is cancelled , the JoinPoint is immediately unblocked .
* If some given synchronization points are null , they are just skipped . */
public static void listenInline ( Runnable listener , ISynchronizationPoint < ? > ... synchPoints ) { } } | JoinPoint < Exception > jp = new JoinPoint < > ( ) ; for ( int i = 0 ; i < synchPoints . length ; ++ i ) if ( synchPoints [ i ] != null ) jp . addToJoin ( synchPoints [ i ] ) ; jp . start ( ) ; jp . listenInline ( listener ) ; |
public class Screenshots {
    /**
     * Take screenshot of WebElement/SelenideElement in iframe;
     * for a partially visible WebElement/SelenideElement a horizontal scroll
     * bar will be present.
     *
     * @param iframe  the iframe containing the element
     * @param element the element to capture
     * @return buffered image
     */
    public static BufferedImage takeScreenShotAsImage(WebElement iframe, WebElement element) {
        // Delegates to the shared screenshots helper using the current driver.
        return screenshots.takeScreenshotAsImage(driver(), iframe, element);
    }
}
public class TIFFImageWriter {
    /**
     * Computes the total number of bits per pixel by summing the sample size
     * of every band in the sample model.
     * TODO: Candidate util method.
     *
     * @param sampleModel the sample model to inspect
     * @return the pixel size in bits
     */
    private int computePixelSize(final SampleModel sampleModel) {
        int bitsPerPixel = 0;
        for (int band = 0, bands = sampleModel.getNumBands(); band < bands; band++) {
            bitsPerPixel += sampleModel.getSampleSize(band);
        }
        return bitsPerPixel;
    }
}
public class DurableOutputHandler { /** * Create a DurableConfirm reply .
* @ param target The target ME for the message .
* @ param reqID The request ID of the original request message .
* @ param status The status to record in this reply . */
protected static ControlDurableConfirm createDurableConfirm ( MessageProcessor MP , SIBUuid8 target , long reqID , int status ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "createDurableConfirm" , new Object [ ] { MP , target , new Long ( reqID ) , new Integer ( status ) } ) ; ControlDurableConfirm msg = null ; try { // Create and initialize the message
msg = MessageProcessor . getControlMessageFactory ( ) . createNewControlDurableConfirm ( ) ; initializeControlMessage ( MP . getMessagingEngineUuid ( ) , msg , target ) ; // Parameterize for CreateStream
msg . setRequestID ( reqID ) ; msg . setStatus ( status ) ; } catch ( Exception e ) { FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.DurableOutputHandler.createDurableConfirm" , "1:509:1.45.1.1" , DurableOutputHandler . class ) ; SibTr . exception ( tc , e ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "createDurableConfirm" , msg ) ; return msg ; |
public class LogicalClock { /** * Increments the clock and updates it using the given timestamp .
* @ param timestamp the timestamp with which to update the clock
* @ return the updated clock time */
public LogicalTimestamp incrementAndUpdate ( LogicalTimestamp timestamp ) { } } | long nextValue = currentTimestamp . value ( ) + 1 ; if ( timestamp . value ( ) > nextValue ) { return update ( timestamp ) ; } return increment ( ) ; |
public class RESTClientEnablerExampleSbb {
    /**
     * Callback invoked when the REST client enabler child completes a request:
     * logs the response status and body content, or the execution exception
     * when no HTTP response is available.
     */
    @Override
    public void onResponse(RESTClientEnablerChildSbbLocalObject child, RESTClientEnablerResponse response) {
        String uri = response.getRequest().getUri();
        RESTClientEnablerRequest.Type type = response.getRequest().getType();
        HttpResponse httpResponse = response.getHttpResponse();
        if (httpResponse != null) {
            String content = null;
            if (httpResponse.getEntity() != null) {
                try {
                    content = EntityUtils.toString(httpResponse.getEntity());
                } catch (Exception e) {
                    tracer.severe("failed to extract response content", e);
                }
            }
            tracer.info("onResponse. Child '" + child + "', request type '" + type + "', uri '" + uri + "', status '" + httpResponse.getStatusLine().getStatusCode() + "', response content '" + content + "'");
        } else {
            // No HTTP response means the request itself failed; log the exception instead.
            tracer.info("onResponse. Child '" + child + "', request type '" + type + "', uri '" + uri + "', exception '" + response.getExecutionException() + "'");
        }
    }
}
public class ReviewsImpl { /** * This API adds a transcript file ( text version of all the words spoken in a video ) to a video review . The file should be a valid WebVTT format .
* @ param teamName Your team name .
* @ param reviewId Id of the review .
* @ param vTTfile Transcript file of the video .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceResponse } object if successful . */
public Observable < Void > addVideoTranscriptAsync ( String teamName , String reviewId , byte [ ] vTTfile ) { } } | return addVideoTranscriptWithServiceResponseAsync ( teamName , reviewId , vTTfile ) . map ( new Func1 < ServiceResponse < Void > , Void > ( ) { @ Override public Void call ( ServiceResponse < Void > response ) { return response . body ( ) ; } } ) ; |
public class GeometryTools { /** * An average of all 2D bond length values is produced . Bonds which have
* Atom ' s with no coordinates are disregarded .
* See comment for center ( IAtomContainer atomCon , Dimension areaDim , HashMap renderingCoordinates ) for details on coordinate sets
* @ param container The AtomContainer for which the average bond length is to be
* calculated
* @ return the average bond length */
public static double getBondLengthAverage ( IAtomContainer container ) { } } | double bondLengthSum = 0 ; Iterator < IBond > bonds = container . bonds ( ) . iterator ( ) ; int bondCounter = 0 ; while ( bonds . hasNext ( ) ) { IBond bond = bonds . next ( ) ; IAtom atom1 = bond . getBegin ( ) ; IAtom atom2 = bond . getEnd ( ) ; if ( atom1 . getPoint2d ( ) != null && atom2 . getPoint2d ( ) != null ) { bondCounter ++ ; bondLengthSum += getLength2D ( bond ) ; } } return bondLengthSum / bondCounter ; |
public class DataUtil { /** * Lazy mechanism for stream loading
* @ param data file
* @ return lazy stream */
public static HasInputStream lazyFileStream ( final File data ) { } } | return new HasInputStream ( ) { @ Override public InputStream getInputStream ( ) throws IOException { return new FileInputStream ( data ) ; } @ Override public long writeContent ( OutputStream outputStream ) throws IOException { return copyStream ( getInputStream ( ) , outputStream ) ; } } ; |
public class ClassLoaderWrapper { /** * { @ inheritDoc } */
@ Override protected synchronized Class < ? > loadClass ( String name , boolean resolve ) throws ClassNotFoundException { } } | // Check if class is in the loaded classes cache
Class < ? > cachedClass = findLoadedClass ( name ) ; if ( cachedClass != null ) { if ( resolve ) { resolveClass ( cachedClass ) ; } return cachedClass ; } // Check parent class loaders
for ( int i = 0 ; i < parents . length ; i ++ ) { ClassLoader parent = parents [ i ] ; try { Class < ? > clazz = parent . loadClass ( name ) ; if ( resolve ) { resolveClass ( clazz ) ; } return clazz ; } catch ( ClassNotFoundException ignored ) { // this parent didn ' t have the class ; try the next one
} } throw new ClassNotFoundException ( name ) ; |
public class HttpRequestUtil { /** * Method to open / establish a URLConnection .
* @ param url The URL to connect to .
* @ param proxy The proxy configuration .
* @ return { @ link URLConnection }
* @ throws IOException */
private static URLConnection getConnection ( String url , final ProxyConfig proxy ) throws IOException { } } | URLConnection conn = null ; if ( proxy != null && proxy . getProxyUser ( ) != null ) { Authenticator . setDefault ( new Authenticator ( ) { @ Override protected PasswordAuthentication getPasswordAuthentication ( ) { return new PasswordAuthentication ( proxy . getProxyUser ( ) , proxy . getProxyPassword ( ) . toCharArray ( ) ) ; } } ) ; } if ( proxy != null ) { Proxy thisProxy = new Proxy ( proxy . getProxyType ( ) , new InetSocketAddress ( proxy . getProxyHost ( ) , proxy . getProxyPort ( ) ) ) ; conn = new URL ( url ) . openConnection ( thisProxy ) ; } else { conn = new URL ( url ) . openConnection ( ) ; } return conn ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.