signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class DepTreeBuilder { /** * / * ( non - Javadoc )
* @ see java . util . concurrent . Callable # call ( ) */
public Result call ( ) throws Exception { } }
|
final String sourceMethod = "call" ; // $ NON - NLS - 1 $
IResourceVisitor visitor = new IResourceVisitor ( ) { /* ( non - Javadoc )
* @ see com . ibm . jaggr . service . modules . ResourceVisitor # visitResource ( com . ibm . jaggr . service . modules . Resource ) */
@ Override public boolean visitResource ( Resource resource , String pathname ) throws IOException { if ( pathname == null ) { pathname = "" ; // $ NON - NLS - 1 $
} int idx = pathname . lastIndexOf ( "/" ) ; // $ NON - NLS - 1 $
String resname = idx == - 1 ? pathname : pathname . substring ( idx + 1 ) ; if ( resource . isFolder ( ) ) { if ( ! resname . startsWith ( "." ) ) { // $ NON - NLS - 1 $
root . createOrGet ( pathname , resource . getURI ( ) ) ; return true ; } else { return false ; } } if ( ! resource . getURI ( ) . getPath ( ) . endsWith ( ".js" ) ) { // $ NON - NLS - 1 $
// non - JavaScript resource
idx = resname . lastIndexOf ( "." ) ; // $ NON - NLS - 1 $
String extension = idx == - 1 ? "" : resname . substring ( idx + 1 ) ; // $ NON - NLS - 1 $
if ( nonJSExtensions . contains ( extension ) ) { DepTreeNode node = root . createOrGet ( pathname , resource . getURI ( ) ) ; node . setDependencies ( null , null , null , resource . lastModified ( ) , resource . lastModified ( ) ) ; } return false ; } // strip off the . js extension
if ( pathname . endsWith ( ".js" ) ) { // $ NON - NLS - 1 $
pathname = pathname . substring ( 0 , pathname . length ( ) - 3 ) ; } DepTreeNode node = ( pathname . length ( ) > 0 ) ? root . createOrGet ( pathname , resource . getURI ( ) ) : root ; DepTreeNode cachedNode = null ; if ( cached != null ) { cachedNode = ( pathname . length ( ) > 0 ) ? cached . getDescendent ( pathname ) : cached ; } if ( cachedNode != null ) { node . setDependencies ( cachedNode . getDefineDepArray ( ) , cachedNode . getRequireDepArray ( ) , cachedNode . getDependentFeatures ( ) , cachedNode . lastModified ( ) , cachedNode . lastModifiedDep ( ) ) ; } /* * The path is for a javascript module . Check the timestamp for the
* node against the timestamp for the file object to see if the file
* has changed and we need to re - parse it . */
if ( node . lastModified ( ) != resource . lastModified ( ) ) { // File has changed , or is new . Submit an async parser job .
parserCs . submit ( new DepParser ( node , resource . newResource ( aggregator ) , compilerOptionsMap ) ) ; parserCount . incrementAndGet ( ) ; } return true ; } } ; /* * Process the path . The treeWalker method will queue files
* to the parser completion service to parse javascript
* files in order to read the require list from the AMD
* define ( ) function , and increments parserCount for each
* file queued . */
IResource resource = aggregator . newResource ( uri ) ; if ( resource . exists ( ) ) { resource . walkTree ( visitor ) ; } /* * Call treeWalker again , this time to add the javascript
* module with the same pathname , if it exists . */
String name = uri . getPath ( ) ; if ( ! name . endsWith ( ".js" ) ) { // $ NON - NLS - 1 $
if ( name . endsWith ( "/" ) ) { // $ NON - NLS - 1 $
name = name . substring ( 0 , name . length ( ) - 1 ) ; } int idx = name . lastIndexOf ( "/" ) ; // $ NON - NLS - 1 $
if ( idx != - 1 ) { name = name . substring ( idx + 1 ) ; } name += ".js" ; // $ NON - NLS - 1 $
resource = aggregator . newResource ( uri . resolve ( ( uri . getPath ( ) . endsWith ( "/" ) ? "../" : "./" ) + name ) ) ; // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $ / / $ NON - NLS - 3 $
if ( resource . exists ( ) ) { visitor . visitResource ( resource . asVisitorResource ( ) , "" ) ; // $ NON - NLS - 1 $
} } // Record the count of files queue
int totalCount = parserCount . get ( ) ; // Pull the completed parser tasks from the completion queue
// until all files have been parsed
while ( parserCount . decrementAndGet ( ) >= 0 ) { try { SignalUtil . take ( parserCs , sourceClass , sourceMethod ) . get ( ) ; } catch ( Exception e ) { if ( log . isLoggable ( Level . SEVERE ) ) log . logp ( Level . SEVERE , sourceClass , sourceMethod , e . getMessage ( ) , e ) ; } } // Return the buildReader
return new Result ( uri . toString ( ) , totalCount ) ;
|
public class SuperCfTemplate { /** * Counts columns in the specified range of a super column family
* @ param key
* @ param start
* @ param end
* @ param max
* @ return */
public int countColumns ( K key , SN start , SN end , int max ) { } }
|
SuperCountQuery < K , SN > query = HFactory . createSuperCountQuery ( keyspace , keySerializer , topSerializer ) ; query . setKey ( key ) ; query . setColumnFamily ( columnFamily ) ; query . setRange ( start , end , max ) ; return query . execute ( ) . get ( ) ;
|
public class Hours { /** * Returns a new instance with the specified number of hours added .
* This instance is immutable and unaffected by this method call .
* @ param hours the amount of hours to add , may be negative
* @ return the new period plus the specified number of hours
* @ throws ArithmeticException if the result overflows an int */
public Hours plus ( int hours ) { } }
|
if ( hours == 0 ) { return this ; } return Hours . hours ( FieldUtils . safeAdd ( getValue ( ) , hours ) ) ;
|
public class Agg { /** * Get a { @ link Collector } that calculates the < code > MEDIAN ( ) < / code > function given a specific ordering . */
public static < T , U extends Comparable < ? super U > > Collector < T , ? , Optional < U > > median ( Function < ? super T , ? extends U > function ) { } }
|
return percentile ( 0.5 , function ) ;
|
public class ListTagsRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ListTagsRequest listTagsRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( listTagsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( listTagsRequest . getResourceIdList ( ) , RESOURCEIDLIST_BINDING ) ; protocolMarshaller . marshall ( listTagsRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class BeginSegmentCommandImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public void eSet ( int featureID , Object newValue ) { } }
|
switch ( featureID ) { case AfplibPackage . BEGIN_SEGMENT_COMMAND__LENGTH : setLENGTH ( ( Integer ) newValue ) ; return ; case AfplibPackage . BEGIN_SEGMENT_COMMAND__NAME : setNAME ( ( Integer ) newValue ) ; return ; case AfplibPackage . BEGIN_SEGMENT_COMMAND__FLAG1 : setFLAG1 ( ( Integer ) newValue ) ; return ; case AfplibPackage . BEGIN_SEGMENT_COMMAND__FLAG2 : setFLAG2 ( ( Integer ) newValue ) ; return ; case AfplibPackage . BEGIN_SEGMENT_COMMAND__SEGL : setSEGL ( ( Integer ) newValue ) ; return ; case AfplibPackage . BEGIN_SEGMENT_COMMAND__PSNAME : setPSNAME ( ( String ) newValue ) ; return ; } super . eSet ( featureID , newValue ) ;
|
public class JCRDateFormat { /** * Parse string using possible formats list .
* @ param dateString
* - date string
* @ return - calendar
* @ throws ValueFormatException */
public static Calendar parse ( String dateString ) throws ValueFormatException { } }
|
try { return ISO8601 . parseEx ( dateString ) ; } catch ( ParseException e ) { throw new ValueFormatException ( "Can not parse date from [" + dateString + "]" , e ) ; } catch ( NumberFormatException e ) { throw new ValueFormatException ( "Can not parse date from [" + dateString + "]" , e ) ; }
|
public class JKNumbersUtil { /** * Fix amount .
* @ param value the value
* @ return the double */
public static double fixAmount ( final double value ) { } }
|
final BigDecimal b1 = new BigDecimal ( value ) ; final BigDecimal b2 = b1 . setScale ( 3 , BigDecimal . ROUND_HALF_UP ) ; return b2 . doubleValue ( ) ;
|
public class PartitionStrategy { /** * Return a { @ link PartitionStrategy } for subpartitions starting at the given
* index . */
PartitionStrategy getSubpartitionStrategy ( int startIndex ) { } }
|
if ( startIndex == 0 ) { return this ; } if ( startIndex >= fieldPartitioners . size ( ) ) { return null ; } return new PartitionStrategy ( fieldPartitioners . subList ( startIndex , fieldPartitioners . size ( ) ) ) ;
|
public class PlotTab { /** * GEN - LAST : event _ jButtonResetGnuPActionPerformed */
private void jButtonGnuoPathActionPerformed ( java . awt . event . ActionEvent evt ) { } }
|
// GEN - FIRST : event _ jButtonGnuoPathActionPerformed
this . jTextFieldGNUPath . setText ( getDirectory ( "open" , JFileChooser . DIRECTORIES_ONLY ) ) ;
|
public class ModelsImpl { /** * Deletes a composite entity extractor from the application .
* @ param appId The application ID .
* @ param versionId The version ID .
* @ param cEntityId The composite entity extractor ID .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < OperationStatus > deleteCompositeEntityAsync ( UUID appId , String versionId , UUID cEntityId , final ServiceCallback < OperationStatus > serviceCallback ) { } }
|
return ServiceFuture . fromResponse ( deleteCompositeEntityWithServiceResponseAsync ( appId , versionId , cEntityId ) , serviceCallback ) ;
|
public class BitmapUtils { /** * Get width and height of the bitmap specified with the byte array .
* @ param byteArray the bitmap itself .
* @ param offset offset index .
* @ param length array length .
* @ return the size . */
public static Point getSize ( byte [ ] byteArray , int offset , int length ) { } }
|
BitmapFactory . Options options = new BitmapFactory . Options ( ) ; options . inJustDecodeBounds = true ; BitmapFactory . decodeByteArray ( byteArray , offset , length , options ) ; int width = options . outWidth ; int height = options . outHeight ; return new Point ( width , height ) ;
|
public class AppServiceCertificateOrdersInner { /** * Delete an existing certificate order .
* Delete an existing certificate order .
* @ param resourceGroupName Name of the resource group to which the resource belongs .
* @ param certificateOrderName Name of the certificate order .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */
public void delete ( String resourceGroupName , String certificateOrderName ) { } }
|
deleteWithServiceResponseAsync ( resourceGroupName , certificateOrderName ) . toBlocking ( ) . single ( ) . body ( ) ;
|
public class NetworkUtil { /** * Get the local network info as a string
* @ return return a description of the current network setup of the local host .
* @ throws UnknownHostException
* @ throws SocketException */
public static String dumpLocalNetworkInfo ( ) throws UnknownHostException , SocketException { } }
|
StringBuffer buffer = new StringBuffer ( ) ; InetAddress addr = InetAddress . getLocalHost ( ) ; buffer . append ( "Localhost: " + getAddrInfo ( addr ) + "\n" ) ; Enumeration < NetworkInterface > nifs = NetworkInterface . getNetworkInterfaces ( ) ; buffer . append ( "Network interfaces:\n" ) ; while ( nifs . hasMoreElements ( ) ) { NetworkInterface nif = nifs . nextElement ( ) ; buffer . append ( " - " + getNetworkInterfaceInfo ( nif ) + "\n" ) ; Enumeration < InetAddress > addresses = nif . getInetAddresses ( ) ; while ( addresses . hasMoreElements ( ) ) { addr = addresses . nextElement ( ) ; buffer . append ( " " + getAddrInfo ( addr ) + "\n" ) ; } } return buffer . toString ( ) ;
|
public class LocationImpl { /** * Eagerly compute the stacktrace line from the stackTraceHolder . Storing the Throwable is
* memory - intensive for tests that have large stacktraces and have a lot of invocations on
* mocks . */
private void computeStackTraceInformation ( StackTraceFilter stackTraceFilter , Throwable stackTraceHolder , boolean isInline ) { } }
|
StackTraceElement filtered = stackTraceFilter . filterFirst ( stackTraceHolder , isInline ) ; // there are corner cases where exception can have a null or empty stack trace
// for example , a custom exception can override getStackTrace ( ) method
if ( filtered == null ) { this . stackTraceLine = "-> at <<unknown line>>" ; this . sourceFile = "<unknown source file>" ; } else { this . stackTraceLine = "-> at " + filtered . toString ( ) ; this . sourceFile = filtered . getFileName ( ) ; }
|
public class JavaEncrypt { /** * SHA - 256加密
* @ param string { @ link String }
* @ return { @ link String }
* @ throws NoSuchAlgorithmException 异常
* @ throws UnsupportedEncodingException 异常 */
public static String sha256 ( String string ) throws NoSuchAlgorithmException , UnsupportedEncodingException { } }
|
MessageDigest messageDigest ; messageDigest = MessageDigest . getInstance ( "SHA-256" ) ; byte [ ] hash = messageDigest . digest ( string . getBytes ( "UTF-8" ) ) ; return String . valueOf ( Hex . encodeHex ( hash ) ) ;
|
public class RegexUrlMapping { /** * Matches the given URI and returns a DefaultUrlMappingInfo instance or null
* @ param uri The URI to match
* @ return A UrlMappingInfo instance or null
* @ see grails . web . mapping . UrlMappingInfo */
public UrlMappingInfo match ( String uri ) { } }
|
for ( Pattern pattern : patterns ) { Matcher m = pattern . matcher ( uri ) ; if ( m . matches ( ) ) { UrlMappingInfo urlInfo = createUrlMappingInfo ( uri , m ) ; if ( urlInfo != null ) { return urlInfo ; } } } return null ;
|
public class ObjectFactory { /** * Create an instance of { @ link Project . Calendars . Calendar . WeekDays . WeekDay } */
public Project . Calendars . Calendar . WeekDays . WeekDay createProjectCalendarsCalendarWeekDaysWeekDay ( ) { } }
|
return new Project . Calendars . Calendar . WeekDays . WeekDay ( ) ;
|
public class KeyrefModule { /** * Adjust key targets per rewrites */
List < ResolveTask > adjustResourceRenames ( final List < ResolveTask > renames ) { } }
|
final Map < KeyScope , List < ResolveTask > > scopes = renames . stream ( ) . collect ( Collectors . groupingBy ( rt -> rt . scope ) ) ; final List < ResolveTask > res = new ArrayList < > ( ) ; for ( final Map . Entry < KeyScope , List < ResolveTask > > group : scopes . entrySet ( ) ) { final KeyScope scope = group . getKey ( ) ; final List < ResolveTask > tasks = group . getValue ( ) ; final Map < URI , URI > rewrites = tasks . stream ( ) // FIXME this should be filtered out earlier
. filter ( t -> t . out != null ) . collect ( toMap ( t -> t . in . uri , t -> t . out . uri ) ) ; final KeyScope resScope = rewriteScopeTargets ( scope , rewrites ) ; tasks . stream ( ) . map ( t -> new ResolveTask ( resScope , t . in , t . out ) ) . forEach ( res :: add ) ; } return res ;
|
public class RandomVariableAAD { /** * / * ( non - Javadoc )
* @ see net . finmath . stochastic . RandomVariable # getQuantileExpectation ( double , double ) */
@ Override public double getQuantileExpectation ( double quantileStart , double quantileEnd ) { } }
|
return ( ( RandomVariableAAD ) getRandomVariableInterface ( ) ) . getRandomVariableInterface ( ) . getQuantileExpectation ( quantileStart , quantileEnd ) ;
|
public class EmbedBuilder { /** * Sets the Color of the embed .
* < a href = " http : / / i . imgur . com / 2YnxnRM . png " target = " _ blank " > Example < / a >
* @ param color
* The { @ link java . awt . Color Color } of the embed
* or { @ code null } to use no color
* @ return the builder after the color has been set
* @ see # setColor ( int ) */
public EmbedBuilder setColor ( Color color ) { } }
|
this . color = color == null ? Role . DEFAULT_COLOR_RAW : color . getRGB ( ) ; return this ;
|
public class DSLMapParser { /** * src / main / resources / org / drools / compiler / lang / dsl / DSLMap . g : 247:1 : variable _ reference _ expr : ( LITERAL | EQUALS ) + ; */
public final DSLMapParser . variable_reference_expr_return variable_reference_expr ( ) throws RecognitionException { } }
|
DSLMapParser . variable_reference_expr_return retval = new DSLMapParser . variable_reference_expr_return ( ) ; retval . start = input . LT ( 1 ) ; Object root_0 = null ; Token set36 = null ; Object set36_tree = null ; try { // src / main / resources / org / drools / compiler / lang / dsl / DSLMap . g : 248:5 : ( ( LITERAL | EQUALS ) + )
// src / main / resources / org / drools / compiler / lang / dsl / DSLMap . g : 248:8 : ( LITERAL | EQUALS ) +
{ root_0 = ( Object ) adaptor . nil ( ) ; // src / main / resources / org / drools / compiler / lang / dsl / DSLMap . g : 248:8 : ( LITERAL | EQUALS ) +
int cnt17 = 0 ; loop17 : while ( true ) { int alt17 = 2 ; int LA17_0 = input . LA ( 1 ) ; if ( ( LA17_0 == EQUALS || LA17_0 == LITERAL ) ) { alt17 = 1 ; } switch ( alt17 ) { case 1 : // src / main / resources / org / drools / compiler / lang / dsl / DSLMap . g :
{ set36 = input . LT ( 1 ) ; if ( input . LA ( 1 ) == EQUALS || input . LA ( 1 ) == LITERAL ) { input . consume ( ) ; if ( state . backtracking == 0 ) adaptor . addChild ( root_0 , ( Object ) adaptor . create ( set36 ) ) ; state . errorRecovery = false ; state . failed = false ; } else { if ( state . backtracking > 0 ) { state . failed = true ; return retval ; } MismatchedSetException mse = new MismatchedSetException ( null , input ) ; throw mse ; } } break ; default : if ( cnt17 >= 1 ) break loop17 ; if ( state . backtracking > 0 ) { state . failed = true ; return retval ; } EarlyExitException eee = new EarlyExitException ( 17 , input ) ; throw eee ; } cnt17 ++ ; } } retval . stop = input . LT ( - 1 ) ; if ( state . backtracking == 0 ) { retval . tree = ( Object ) adaptor . rulePostProcessing ( root_0 ) ; adaptor . setTokenBoundaries ( retval . tree , retval . start , retval . stop ) ; } } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; retval . tree = ( Object ) adaptor . errorNode ( input , retval . start , input . LT ( - 1 ) , re ) ; } finally { // do for sure before leaving
} return retval ;
|
public class snmptrap { /** * Use this API to delete snmptrap of given name . */
public static base_response delete ( nitro_service client , String trapclass ) throws Exception { } }
|
snmptrap deleteresource = new snmptrap ( ) ; deleteresource . trapclass = trapclass ; return deleteresource . delete_resource ( client ) ;
|
public class DbPreparedStatement { /** * Sets boolean value for all fields matched by name . */
@ NotNull public DbPreparedStatement < T > set ( @ NotNull String name , boolean value ) throws SQLException { } }
|
for ( int i : getIndexes ( name ) ) { statement . setBoolean ( i , value ) ; } return this ;
|
public class Parser { /** * Parse string to check presence of RETURNING keyword regardless of case .
* @ param query char [ ] of the query statement
* @ param offset position of query to start checking
* @ return boolean indicates presence of word */
public static boolean parseReturningKeyword ( final char [ ] query , int offset ) { } }
|
if ( query . length < ( offset + 9 ) ) { return false ; } return ( query [ offset ] | 32 ) == 'r' && ( query [ offset + 1 ] | 32 ) == 'e' && ( query [ offset + 2 ] | 32 ) == 't' && ( query [ offset + 3 ] | 32 ) == 'u' && ( query [ offset + 4 ] | 32 ) == 'r' && ( query [ offset + 5 ] | 32 ) == 'n' && ( query [ offset + 6 ] | 32 ) == 'i' && ( query [ offset + 7 ] | 32 ) == 'n' && ( query [ offset + 8 ] | 32 ) == 'g' ;
|
public class InfoWindow { /** * this destroys the window and all references to views */
public void onDetach ( ) { } }
|
close ( ) ; if ( mView != null ) mView . setTag ( null ) ; mView = null ; mMapView = null ; if ( Configuration . getInstance ( ) . isDebugMode ( ) ) Log . d ( IMapView . LOGTAG , "Marked detached" ) ;
|
public class ElasticHashinator { /** * Update from raw config bytes .
* token - 1 / partition - 1
* token - 2 / partition - 2
* tokens are 8 bytes
* @ param configBytes raw config data
* @ return token / partition map */
private Pair < Long , Integer > updateRaw ( byte configBytes [ ] ) { } }
|
ByteBuffer buf = ByteBuffer . wrap ( configBytes ) ; int numEntries = buf . getInt ( ) ; if ( numEntries < 0 ) { throw new RuntimeException ( "Bad elastic hashinator config" ) ; } final int bytes = 8 * numEntries ; long tokens = Bits . unsafe . allocateMemory ( bytes ) ; trackAllocatedHashinatorBytes ( bytes ) ; int lastToken = Integer . MIN_VALUE ; for ( int ii = 0 ; ii < numEntries ; ii ++ ) { long ptr = tokens + ( ii * 8 ) ; final int token = buf . getInt ( ) ; Preconditions . checkArgument ( token >= lastToken ) ; lastToken = token ; Bits . unsafe . putInt ( ptr , token ) ; final int partitionId = buf . getInt ( ) ; Bits . unsafe . putInt ( ptr + 4 , partitionId ) ; } return Pair . of ( tokens , numEntries ) ;
|
public class DialogRootView { /** * Adapts the visibility of the top and bottom divider , depending on whether the scrollable area
* is scrolled to the top / bottom , or not .
* @ param scrolledToTop
* True , if the scrollable area is scrolled to the top , false otherwise
* @ param scrolledToBottom
* True , if the scrollable area is scrolled to the bottom , false otherwise
* @ param animate
* True , if the visibility should be changed in an animated manner , false otherwise */
private void adaptDividerVisibilities ( final boolean scrolledToTop , final boolean scrolledToBottom , final boolean animate ) { } }
|
if ( topDivider != null && ! topDivider . isVisibleByDefault ( ) ) { topDivider . setVisibility ( scrolledToTop || ! showDividersOnScroll ? View . INVISIBLE : View . VISIBLE , animate ) ; } if ( bottomDivider != null && ! bottomDivider . isVisibleByDefault ( ) ) { bottomDivider . setVisibility ( scrolledToBottom || ! showDividersOnScroll ? View . INVISIBLE : View . VISIBLE , animate ) ; }
|
public class JPEGLosslessDecoderWrapper { /** * Converts the decoded buffer into a BufferedImage .
* precision : 8 bit , componentCount = 1
* @ param decoded data buffer
* @ param width of the image
* @ param height of the image
* @ return a BufferedImage . TYPE _ BYTE _ GRAY */
private BufferedImage to8Bit1ComponentGrayScale ( int [ ] [ ] decoded , int width , int height ) { } }
|
BufferedImage image = new BufferedImage ( width , height , BufferedImage . TYPE_BYTE_GRAY ) ; byte [ ] imageBuffer = ( ( DataBufferByte ) image . getRaster ( ) . getDataBuffer ( ) ) . getData ( ) ; for ( int i = 0 ; i < imageBuffer . length ; i ++ ) { imageBuffer [ i ] = ( byte ) decoded [ 0 ] [ i ] ; } return image ;
|
public class CommerceDiscountPersistenceImpl { /** * Returns the commerce discount where uuid = & # 63 ; and groupId = & # 63 ; or throws a { @ link NoSuchDiscountException } if it could not be found .
* @ param uuid the uuid
* @ param groupId the group ID
* @ return the matching commerce discount
* @ throws NoSuchDiscountException if a matching commerce discount could not be found */
@ Override public CommerceDiscount findByUUID_G ( String uuid , long groupId ) throws NoSuchDiscountException { } }
|
CommerceDiscount commerceDiscount = fetchByUUID_G ( uuid , groupId ) ; if ( commerceDiscount == null ) { StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "uuid=" ) ; msg . append ( uuid ) ; msg . append ( ", groupId=" ) ; msg . append ( groupId ) ; msg . append ( "}" ) ; if ( _log . isDebugEnabled ( ) ) { _log . debug ( msg . toString ( ) ) ; } throw new NoSuchDiscountException ( msg . toString ( ) ) ; } return commerceDiscount ;
|
public class M3UAShellExecutor { /** * m3ua as create < as - name > < AS | SGW | IPSP > mode < SE | DE > ipspType < client | server > rc < routing - context > traffic - mode
* < traffic mode > min - asp < minimum asp active for TrafficModeType . Loadshare > network - appearance < network appearance >
* stackname < stack - name >
* @ param args
* @ return */
private String createAs ( String [ ] args ) throws Exception { } }
|
if ( args . length < 5 || args . length > 19 ) { return M3UAOAMMessages . INVALID_COMMAND ; } // Create new Rem AS
String asName = args [ 3 ] ; if ( asName == null ) { return M3UAOAMMessages . INVALID_COMMAND ; } Functionality functionlaity = Functionality . getFunctionality ( args [ 4 ] ) ; ExchangeType exchangeType = null ; IPSPType ipspType = null ; RoutingContext rc = null ; TrafficModeType trafficModeType = null ; NetworkAppearance na = null ; if ( functionlaity == null ) { return M3UAOAMMessages . INVALID_COMMAND ; } int count = 5 ; int minAspActiveForLoadbalance = 1 ; while ( count < args . length ) { String key = args [ count ++ ] ; if ( key == null ) { return M3UAOAMMessages . INVALID_COMMAND ; } if ( key . equals ( "mode" ) ) { exchangeType = ExchangeType . getExchangeType ( args [ count ++ ] ) ; if ( exchangeType == null ) { return M3UAOAMMessages . INVALID_COMMAND ; } } else if ( key . equals ( "ipspType" ) ) { ipspType = IPSPType . getIPSPType ( args [ count ++ ] ) ; } else if ( key . equals ( "rc" ) ) { long rcLong = Long . parseLong ( args [ count ++ ] ) ; rc = parameterFactory . createRoutingContext ( new long [ ] { rcLong } ) ; } else if ( key . equals ( "traffic-mode" ) ) { trafficModeType = getTrafficModeType ( args [ count ++ ] ) ; } else if ( key . equals ( "network-appearance" ) ) { na = parameterFactory . createNetworkAppearance ( Long . parseLong ( args [ count ++ ] ) ) ; } else if ( key . equals ( "min-asp" ) ) { minAspActiveForLoadbalance = Integer . parseInt ( args [ count ++ ] ) ; } else if ( key . equals ( "stackname" ) ) { String m3uaStackName = args [ count ++ ] ; M3UAManagementImpl m3uaManagementImpl = this . m3uaManagements . get ( m3uaStackName ) ; if ( m3uaManagementImpl == null ) { return String . format ( M3UAOAMMessages . NO_M3UA_MANAGEMENT_BEAN_FOR_NAME , m3uaStackName ) ; } this . m3uaManagement = m3uaManagementImpl ; } else { return M3UAOAMMessages . INVALID_COMMAND ; } } this . setDefaultValue ( ) ; As asImpl = this . m3uaManagement . 
createAs ( asName , functionlaity , exchangeType , ipspType , rc , trafficModeType , minAspActiveForLoadbalance , na ) ; return String . format ( M3UAOAMMessages . CREATE_AS_SUCESSFULL , asImpl . getName ( ) , this . m3uaManagement . getName ( ) ) ;
|
public class CameraEncoder { /** * Called on Encoder thread */
private void handleSetSurfaceTexture ( int textureId ) throws IOException { } }
|
synchronized ( mSurfaceTextureFence ) { if ( mSurfaceTexture != null ) { // We ' re hot - swapping the display EGLContext after
// creating the initial SurfaceTexture for camera display
mInputWindowSurface . makeCurrent ( ) ; mSurfaceTexture . detachFromGLContext ( ) ; // Release the EGLSurface and EGLContext .
mInputWindowSurface . releaseEglSurface ( ) ; mFullScreen . release ( ) ; mEglCore . release ( ) ; // Create a new EGLContext and recreate the window surface .
mEglCore = new EglCore ( mEglSaver . getSavedEGLContext ( ) , EglCore . FLAG_RECORDABLE ) ; mInputWindowSurface . recreate ( mEglCore ) ; mInputWindowSurface . makeCurrent ( ) ; // Create new programs and such for the new context .
mTextureId = textureId ; mFullScreen = new FullFrameRect ( new Texture2dProgram ( Texture2dProgram . ProgramType . TEXTURE_EXT ) ) ; mFullScreen . getProgram ( ) . setTexSize ( mSessionConfig . getVideoWidth ( ) , mSessionConfig . getVideoHeight ( ) ) ; mIncomingSizeUpdated = true ; mSurfaceTexture . attachToGLContext ( mTextureId ) ; // mEglSaver . makeNothingCurrent ( ) ;
} else { // We ' re setting up the initial SurfaceTexture
prepareEncoder ( mEglSaver . getSavedEGLContext ( ) , mSessionConfig . getVideoWidth ( ) , mSessionConfig . getVideoHeight ( ) , mSessionConfig . getVideoBitrate ( ) , mSessionConfig . getMuxer ( ) ) ; mTextureId = textureId ; mSurfaceTexture = new SurfaceTexture ( mTextureId ) ; if ( VERBOSE ) Log . i ( TAG + "-SurfaceTexture" , " SurfaceTexture created. pre setOnFrameAvailableListener" ) ; mSurfaceTexture . setOnFrameAvailableListener ( this ) ; openAndAttachCameraToSurfaceTexture ( ) ; mReadyForFrames = true ; } }
|
public class Scene { /** * Moves the layer to the top of the layers stack in this scene .
* @ param layer */
@ Override public final Scene moveToTop ( final Layer layer ) { } }
|
if ( ( null != layer ) && ( LienzoCore . IS_CANVAS_SUPPORTED ) ) { final int size = getElement ( ) . getChildCount ( ) ; if ( size < 2 ) { return this ; } final DivElement element = layer . getElement ( ) ; getElement ( ) . removeChild ( element ) ; getElement ( ) . appendChild ( element ) ; final NFastArrayList < Layer > layers = getChildNodes ( ) ; if ( null != layers ) { layers . moveToTop ( layer ) ; } } return this ;
|
public class GrapesEmailSender { /** * default values */
public Properties getDefaultSmtpProperties ( ) { } }
|
final Properties defaults = new Properties ( ) ; defaults . put ( MAIL_SMTP_AUTH , false ) ; defaults . put ( MAIL_SMTP_STARTTLS_ENABLE , false ) ; return defaults ;
|
public class EditsVisitor { /** * Convenience shortcut method to parse a specific token type */
public LongToken visitLong ( EditsElement e ) throws IOException { } }
|
return ( LongToken ) visit ( tokenizer . read ( new LongToken ( e ) ) ) ;
|
public class ContainerOnlySwarm { /** * Main entry - point .
* @ param args Ignored .
* @ throws Exception if an error occurs . */
public static void main ( String ... args ) throws Exception { } }
|
if ( System . getProperty ( "boot.module.loader" ) == null ) { System . setProperty ( "boot.module.loader" , "org.wildfly.swarm.bootstrap.modules.BootModuleLoader" ) ; } Module bootstrap = Module . getBootModuleLoader ( ) . loadModule ( ModuleIdentifier . create ( "swarm.application" ) ) ; ServiceLoader < ContainerFactory > factory = bootstrap . loadService ( ContainerFactory . class ) ; Iterator < ContainerFactory > factoryIter = factory . iterator ( ) ; if ( ! factoryIter . hasNext ( ) ) { simpleMain ( args ) ; } else { factoryMain ( factoryIter . next ( ) , args ) ; }
|
public class ParseUtil { /** * between first and last , inclusive */
private static long lowMask ( char first , char last ) { } }
|
long m = 0 ; int f = Math . max ( Math . min ( first , 63 ) , 0 ) ; int l = Math . max ( Math . min ( last , 63 ) , 0 ) ; for ( int i = f ; i <= l ; i ++ ) m |= 1L << i ; return m ;
|
public class ConfigurationUtils { /** * Fetches a value specified by key
* @ param map XPP3 map equivalent
* @ param key navigation key
* @ param defaultValue Default value if no such key exists
* @ return List representation of the value */
static List < String > valueAsStringList ( Map < String , Object > map , Key key , List < String > defaultValue ) { } }
|
Validate . notNullOrEmpty ( key . key , "Key for plugin configuration must be set" ) ; if ( map . containsKey ( key . key ) ) { Object rawMapOrObject = map . get ( key . key ) ; // handles non - nested content
if ( key . subKey == null ) { if ( rawMapOrObject == null ) { return defaultValue ; } else { return tokenize ( rawMapOrObject , key . delimiter ) ; } } // go for nested content
if ( rawMapOrObject == null ) { return defaultValue ; } else if ( ! ( rawMapOrObject instanceof Map ) ) { return Arrays . asList ( rawMapOrObject . toString ( ) ) ; } // 1 / we can either have < excludes > foo , bar < / excludes >
// 2 / or < excludes > < exclude > foo < / exclude > < exclude > bar < / exclude > < / excludes >
@ SuppressWarnings ( "unchecked" ) Map < String , Object > subMap = ( Map < String , Object > ) rawMapOrObject ; Object nestedRaw = subMap . get ( key . subKey ) ; if ( nestedRaw == null ) { return defaultValue ; } // format 2/
else if ( nestedRaw instanceof Iterable < ? > ) { List < String > list = new ArrayList < String > ( ) ; for ( Object nested : ( Iterable < ? > ) nestedRaw ) { list . addAll ( tokenize ( nested , key . delimiter ) ) ; } return list ; } // format 1/
else { return tokenize ( nestedRaw , key . delimiter ) ; } } return defaultValue ;
|
public class Parse { /** * Replaces the child at the specified index with a new child with the
* specified label .
* @ param index
* The index of the child to be replaced .
* @ param label
* The label to be assigned to the new child . */
public void setChild ( final int index , final String label ) { } }
|
final Parse newChild = ( Parse ) this . parts . get ( index ) . clone ( ) ; newChild . setLabel ( label ) ; this . parts . set ( index , newChild ) ;
|
public class Do { /** * < div color = ' red ' style = " font - size : 24px ; color : red " > < b > < i > < u > JCYPHER < / u > < / i > < / b > < / div >
* < div color = ' red ' style = " font - size : 18px ; color : red " > < i > create a FOREACH expression in the DO part of another FOREACH expression < / i > < / div >
* < div color = ' red ' style = " font - size : 18px ; color : red " > < i > Use Factory Class < b > F < / b > to create FOREACH Expressions < / i > < / div >
* < div color = ' red ' style = " font - size : 18px ; color : red " > < i > e . g . . . . < b > FOR _ EACH ( F < / b > . element ( " n " ) . . . ) < / i > < / div >
* < br / > */
public DoConcat FOR_EACH ( DoConcat F ) { } }
|
ASTNode clause = APIObjectAccess . getAstNode ( F ) ; clause . setClauseType ( ClauseType . FOREACH ) ; return createConcat ( clause ) ;
|
public class GeometryTools { /** * Return the RMSD between the 2 aligned molecules .
* @ param firstAtomContainer the ( largest ) first aligned AtomContainer which is the reference
* @ param secondAtomContainer the second aligned AtomContainer
* @ param mappedAtoms Map : a Map of the mapped atoms
* @ param Coords3d boolean : true if molecules has 3D coords , false if molecules has 2D coords
* @ return double : the value of the RMSD
* @ exception CDKException if there is an error in getting mapped atoms */
public static double getAllAtomRMSD ( IAtomContainer firstAtomContainer , IAtomContainer secondAtomContainer , Map < Integer , Integer > mappedAtoms , boolean Coords3d ) throws CDKException { } }
|
// logger . debug ( " * * * * GT getAllAtomRMSD * * * * " ) ;
double sum = 0 ; double RMSD ; Iterator < Integer > firstAtoms = mappedAtoms . keySet ( ) . iterator ( ) ; int firstAtomNumber ; int secondAtomNumber ; int n = 0 ; while ( firstAtoms . hasNext ( ) ) { firstAtomNumber = firstAtoms . next ( ) ; try { secondAtomNumber = mappedAtoms . get ( firstAtomNumber ) ; IAtom firstAtom = firstAtomContainer . getAtom ( firstAtomNumber ) ; if ( Coords3d ) { sum = sum + Math . pow ( firstAtom . getPoint3d ( ) . distance ( secondAtomContainer . getAtom ( secondAtomNumber ) . getPoint3d ( ) ) , 2 ) ; n ++ ; } else { sum = sum + Math . pow ( firstAtom . getPoint2d ( ) . distance ( secondAtomContainer . getAtom ( secondAtomNumber ) . getPoint2d ( ) ) , 2 ) ; n ++ ; } } catch ( Exception ex ) { throw new CDKException ( ex . getMessage ( ) , ex ) ; } } RMSD = Math . sqrt ( sum / n ) ; return RMSD ;
|
public class TypeDiffComputer { /** * Determine if there any differences between the methods supplied . A MethodDelta object is built to record any
* differences and stored against the type delta .
* @ param oMethod ' old ' method
* @ param nMethod ' new ' method
* @ param td the type delta where changes are currently being accumulated */
private static void computeAnyMethodDifferences ( MethodNode oMethod , MethodNode nMethod , TypeDelta td ) { } }
|
MethodDelta md = new MethodDelta ( oMethod . name , oMethod . desc ) ; if ( oMethod . access != nMethod . access ) { md . setAccessChanged ( oMethod . access , nMethod . access ) ; } // TODO annotations
InsnList oInstructions = oMethod . instructions ; InsnList nInstructions = nMethod . instructions ; if ( oInstructions . size ( ) != nInstructions . size ( ) ) { md . setInstructionsChanged ( oInstructions . toArray ( ) , nInstructions . toArray ( ) ) ; } else { // TODO Just interested in constructors right now - should add others
if ( oMethod . name . charAt ( 0 ) == '<' ) { String oInvokeSpecialDescriptor = null ; String nInvokeSpecialDescriptor = null ; int oUninitCount = 0 ; int nUninitCount = 0 ; boolean codeChange = false ; for ( int i = 0 , max = oInstructions . size ( ) ; i < max ; i ++ ) { AbstractInsnNode oInstruction = oInstructions . get ( i ) ; AbstractInsnNode nInstruction = nInstructions . get ( i ) ; if ( ! codeChange ) { if ( ! sameInstruction ( oInstruction , nInstruction ) ) { codeChange = true ; } } if ( oInstruction . getType ( ) == AbstractInsnNode . TYPE_INSN ) { if ( oInstruction . getOpcode ( ) == Opcodes . NEW ) { oUninitCount ++ ; } } if ( nInstruction . getType ( ) == AbstractInsnNode . TYPE_INSN ) { if ( nInstruction . getOpcode ( ) == Opcodes . NEW ) { nUninitCount ++ ; } } if ( oInstruction . getType ( ) == AbstractInsnNode . METHOD_INSN ) { MethodInsnNode mi = ( MethodInsnNode ) oInstruction ; if ( mi . getOpcode ( ) == INVOKESPECIAL && mi . name . equals ( "<init>" ) ) { if ( oUninitCount == 0 ) { // this is the one !
oInvokeSpecialDescriptor = mi . desc ; } else { oUninitCount -- ; } } } if ( nInstruction . getType ( ) == AbstractInsnNode . METHOD_INSN ) { MethodInsnNode mi = ( MethodInsnNode ) nInstruction ; if ( mi . getOpcode ( ) == INVOKESPECIAL && mi . name . equals ( "<init>" ) ) { if ( nUninitCount == 0 ) { // this is the one !
nInvokeSpecialDescriptor = mi . desc ; } else { nUninitCount -- ; } } } } // Has the invokespecial changed ?
if ( oInvokeSpecialDescriptor == null ) { if ( nInvokeSpecialDescriptor != null ) { md . setInvokespecialChanged ( oInvokeSpecialDescriptor , nInvokeSpecialDescriptor ) ; } } else { if ( ! oInvokeSpecialDescriptor . equals ( nInvokeSpecialDescriptor ) ) { md . setInvokespecialChanged ( oInvokeSpecialDescriptor , nInvokeSpecialDescriptor ) ; } } if ( codeChange ) { md . setCodeChanged ( oInstructions . toArray ( ) , nInstructions . toArray ( ) ) ; } } } if ( md . hasAnyChanges ( ) ) { // it needs recording
td . addChangedMethod ( md ) ; }
|
public class DeviceProxy { public DeviceDataHistory [ ] command_history ( String cmdname , int nb ) throws DevFailed { } }
|
return deviceProxyDAO . command_history ( this , cmdname , nb ) ;
|
public class TargetStreamManager { /** * Create a new TargetStream and initialize it with a given completed prefix .
* Always called with streamSet lock
* @ param streamSet
* @ param priority
* @ param reliability
* @ param completedPrefix
* @ return A new TargetStream */
private TargetStream createStream ( StreamSet streamSet , int priority , Reliability reliability , long completedPrefix ) throws SIResourceException { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "createStream" , new Object [ ] { streamSet , Integer . valueOf ( priority ) , reliability , Long . valueOf ( completedPrefix ) } ) ; TargetStream stream = null ; stream = createStream ( streamSet , priority , reliability ) ; stream . setCompletedPrefix ( completedPrefix ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "createStream" ) ; return stream ;
|
public class CmsReplaceDialog { /** * Updates the progress bar . < p >
* @ param info the progress info */
protected void updateProgressBar ( CmsUploadProgessInfo info ) { } }
|
switch ( info . getState ( ) ) { case notStarted : break ; case running : m_progressInfo . setProgress ( info ) ; stopLoadingAnimation ( ) ; break ; case finished : m_progressInfo . finish ( ) ; m_mainPanel . displayDialogInfo ( Messages . get ( ) . key ( Messages . GUI_UPLOAD_INFO_FINISHING_0 ) , false ) ; startLoadingAnimation ( Messages . get ( ) . key ( Messages . GUI_UPLOAD_INFO_CREATING_RESOURCES_0 ) , 1500 ) ; break ; default : break ; }
|
public class SqlValidatorImpl { /** * Derives a row - type for INSERT and UPDATE operations .
* @ param table Target table for INSERT / UPDATE
* @ param targetColumnList List of target columns , or null if not specified
* @ param append Whether to append fields to those in < code >
* baseRowType < / code >
* @ return Rowtype */
protected RelDataType createTargetRowType ( SqlValidatorTable table , SqlNodeList targetColumnList , boolean append ) { } }
|
RelDataType baseRowType = table . getRowType ( ) ; if ( targetColumnList == null ) { return baseRowType ; } List < RelDataTypeField > targetFields = baseRowType . getFieldList ( ) ; final List < Map . Entry < String , RelDataType > > fields = new ArrayList < > ( ) ; if ( append ) { for ( RelDataTypeField targetField : targetFields ) { fields . add ( Pair . of ( SqlUtil . deriveAliasFromOrdinal ( fields . size ( ) ) , targetField . getType ( ) ) ) ; } } final Set < Integer > assignedFields = new HashSet < > ( ) ; final RelOptTable relOptTable = table instanceof RelOptTable ? ( ( RelOptTable ) table ) : null ; for ( SqlNode node : targetColumnList ) { SqlIdentifier id = ( SqlIdentifier ) node ; RelDataTypeField targetField = SqlValidatorUtil . getTargetField ( baseRowType , typeFactory , id , catalogReader , relOptTable ) ; if ( targetField == null ) { throw newValidationError ( id , RESOURCE . unknownTargetColumn ( id . toString ( ) ) ) ; } if ( ! assignedFields . add ( targetField . getIndex ( ) ) ) { throw newValidationError ( id , RESOURCE . duplicateTargetColumn ( targetField . getName ( ) ) ) ; } fields . add ( targetField ) ; } return typeFactory . createStructType ( fields ) ;
|
public class DataSourceOptimizer { /** * Do main work about materialized view selection : transform user query to one or more sub - queries .
* In the sub - query , the dataSource is the derivative of dataSource in user query , and sum of all sub - queries '
* intervals equals the interval in user query
* Derived dataSource with smallest average data size per segment granularity have highest priority to replace the
* datasource in user query
* @ param query only TopNQuery / TimeseriesQuery / GroupByQuery can be optimized
* @ return a list of queries with specified derived dataSources and intervals */
public List < Query > optimize ( Query query ) { } }
|
long start = System . currentTimeMillis ( ) ; // only topN / timeseries / groupby query can be optimized
// only TableDataSource can be optimiezed
if ( ! ( query instanceof TopNQuery || query instanceof TimeseriesQuery || query instanceof GroupByQuery ) || ! ( query . getDataSource ( ) instanceof TableDataSource ) ) { return Collections . singletonList ( query ) ; } String datasourceName = ( ( TableDataSource ) query . getDataSource ( ) ) . getName ( ) ; // get all derivatives for datasource in query . The derivatives set is sorted by average size of
// per segment granularity .
Set < DerivativeDataSource > derivatives = DerivativeDataSourceManager . getDerivatives ( datasourceName ) ; if ( derivatives . isEmpty ( ) ) { return Collections . singletonList ( query ) ; } lock . readLock ( ) . lock ( ) ; try { totalCount . putIfAbsent ( datasourceName , new AtomicLong ( 0 ) ) ; hitCount . putIfAbsent ( datasourceName , new AtomicLong ( 0 ) ) ; costTime . putIfAbsent ( datasourceName , new AtomicLong ( 0 ) ) ; totalCount . get ( datasourceName ) . incrementAndGet ( ) ; // get all fields which the query required
Set < String > requiredFields = MaterializedViewUtils . getRequiredFields ( query ) ; Set < DerivativeDataSource > derivativesWithRequiredFields = new HashSet < > ( ) ; for ( DerivativeDataSource derivativeDataSource : derivatives ) { derivativesHitCount . putIfAbsent ( derivativeDataSource . getName ( ) , new AtomicLong ( 0 ) ) ; if ( derivativeDataSource . getColumns ( ) . containsAll ( requiredFields ) ) { derivativesWithRequiredFields . add ( derivativeDataSource ) ; } } // if no derivatives contains all required dimensions , this materialized view selection failed .
if ( derivativesWithRequiredFields . isEmpty ( ) ) { missFields . putIfAbsent ( datasourceName , new ConcurrentHashMap < > ( ) ) ; missFields . get ( datasourceName ) . putIfAbsent ( requiredFields , new AtomicLong ( 0 ) ) ; missFields . get ( datasourceName ) . get ( requiredFields ) . incrementAndGet ( ) ; costTime . get ( datasourceName ) . addAndGet ( System . currentTimeMillis ( ) - start ) ; return Collections . singletonList ( query ) ; } List < Query > queries = new ArrayList < > ( ) ; List < Interval > remainingQueryIntervals = ( List < Interval > ) query . getIntervals ( ) ; for ( DerivativeDataSource derivativeDataSource : ImmutableSortedSet . copyOf ( derivativesWithRequiredFields ) ) { final List < Interval > derivativeIntervals = remainingQueryIntervals . stream ( ) . flatMap ( interval -> serverView . getTimeline ( ( new TableDataSource ( derivativeDataSource . getName ( ) ) ) ) . lookup ( interval ) . stream ( ) . map ( TimelineObjectHolder :: getInterval ) ) . collect ( Collectors . toList ( ) ) ; // if the derivative does not contain any parts of intervals in the query , the derivative will
// not be selected .
if ( derivativeIntervals . isEmpty ( ) ) { continue ; } remainingQueryIntervals = MaterializedViewUtils . minus ( remainingQueryIntervals , derivativeIntervals ) ; queries . add ( query . withDataSource ( new TableDataSource ( derivativeDataSource . getName ( ) ) ) . withQuerySegmentSpec ( new MultipleIntervalSegmentSpec ( derivativeIntervals ) ) ) ; derivativesHitCount . get ( derivativeDataSource . getName ( ) ) . incrementAndGet ( ) ; if ( remainingQueryIntervals . isEmpty ( ) ) { break ; } } if ( queries . isEmpty ( ) ) { costTime . get ( datasourceName ) . addAndGet ( System . currentTimeMillis ( ) - start ) ; return Collections . singletonList ( query ) ; } // after materialized view selection , the result of the remaining query interval will be computed based on
// the original datasource .
if ( ! remainingQueryIntervals . isEmpty ( ) ) { queries . add ( query . withQuerySegmentSpec ( new MultipleIntervalSegmentSpec ( remainingQueryIntervals ) ) ) ; } hitCount . get ( datasourceName ) . incrementAndGet ( ) ; costTime . get ( datasourceName ) . addAndGet ( System . currentTimeMillis ( ) - start ) ; return queries ; } finally { lock . readLock ( ) . unlock ( ) ; }
|
public class DssatAFileOutput { /** * Remove the 2 - bit year number from input date string
* @ param str input date string
* @ return the days of year */
private String cutYear ( String str ) { } }
|
if ( str . length ( ) > 3 ) { return str . substring ( str . length ( ) - 3 , str . length ( ) ) ; } else { return str ; }
|
public class WorkflowsInner { /** * Get the workflow callback Url .
* @ param resourceGroupName The resource group name .
* @ param workflowName The workflow name .
* @ param listCallbackUrl Which callback url to list .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < WorkflowTriggerCallbackUrlInner > listCallbackUrlAsync ( String resourceGroupName , String workflowName , GetCallbackUrlParameters listCallbackUrl , final ServiceCallback < WorkflowTriggerCallbackUrlInner > serviceCallback ) { } }
|
return ServiceFuture . fromResponse ( listCallbackUrlWithServiceResponseAsync ( resourceGroupName , workflowName , listCallbackUrl ) , serviceCallback ) ;
|
public class PangoolMultipleInputs { /** * Iterates over the Configuration and sets the specific context found for the
* input in the Job instance . Package - access so it can be unit tested . The
* specific context is configured in method this .
* { @ link # addInputContext ( Job , String , String , String ) } */
public static void setSpecificInputContext ( Configuration conf , String inputName , int inputId ) { } }
|
for ( Map . Entry < String , String > entries : conf ) { String confKey = entries . getKey ( ) ; String confValue = entries . getValue ( ) ; if ( confKey . startsWith ( MI_PREFIX + inputName + "." + inputId + CONF ) ) { // Specific context key , value found
String contextKey = confKey . substring ( ( MI_PREFIX + inputName + "." + inputId + CONF + "." ) . length ( ) , confKey . length ( ) ) ; conf . set ( contextKey , confValue ) ; } }
|
public class GeneralPurposeFFT_F32_2D { /** * Computes 2D forward DFT of complex data leaving the result in
* < code > a < / code > . The data is stored in 1D array in row - major order .
* Complex number is stored as two float values in sequence : the real and
* imaginary part , i . e . the input array must be of size rows * 2 * columns . The
* physical layout of the input data has to be as follows : < br >
* < pre >
* a [ k1*2 * columns + 2 * k2 ] = Re [ k1 ] [ k2 ] ,
* a [ k1*2 * columns + 2 * k2 + 1 ] = Im [ k1 ] [ k2 ] , 0 & lt ; = k1 & lt ; rows , 0 & lt ; = k2 & lt ; columns ,
* < / pre >
* @ param a
* data to transform */
public void complexForward ( final float [ ] a ) { } }
|
// handle special case
if ( rows == 1 || columns == 1 ) { if ( rows > 1 ) fftRows . complexForward ( a ) ; else fftColumns . complexForward ( a ) ; return ; } if ( isPowerOfTwo ) { int oldn2 = columns ; columns = 2 * columns ; for ( int r = 0 ; r < rows ; r ++ ) { fftColumns . complexForward ( a , r * columns ) ; } cdft2d_sub ( - 1 , a , true ) ; columns = oldn2 ; } else { final int rowStride = 2 * columns ; for ( int r = 0 ; r < rows ; r ++ ) { fftColumns . complexForward ( a , r * rowStride ) ; } for ( int c = 0 ; c < columns ; c ++ ) { int idx0 = 2 * c ; for ( int r = 0 ; r < rows ; r ++ ) { int idx1 = 2 * r ; int idx2 = r * rowStride + idx0 ; temp [ idx1 ] = a [ idx2 ] ; temp [ idx1 + 1 ] = a [ idx2 + 1 ] ; } fftRows . complexForward ( temp ) ; for ( int r = 0 ; r < rows ; r ++ ) { int idx1 = 2 * r ; int idx2 = r * rowStride + idx0 ; a [ idx2 ] = temp [ idx1 ] ; a [ idx2 + 1 ] = temp [ idx1 + 1 ] ; } } }
|
public class CentroidCluster { /** * { @ inheritDoc } */
public void addVector ( T vector , int id ) { } }
|
VectorMath . add ( centroid , vector ) ; if ( id >= 0 ) assignments . set ( id ) ;
|
public class QualifiedName { /** * Returns a { @ link QualifiedName } for { @ code cls } , unshading if necessary . */
public static QualifiedName of ( Class < ? > cls ) { } }
|
if ( cls . getEnclosingClass ( ) != null ) { return QualifiedName . of ( cls . getEnclosingClass ( ) ) . nestedType ( cls . getSimpleName ( ) ) ; } else if ( cls . getPackage ( ) != null ) { return QualifiedName . of ( cls . getPackage ( ) . getName ( ) , cls . getSimpleName ( ) ) ; } else { return QualifiedName . of ( "" , cls . getSimpleName ( ) ) ; }
|
public class KriptonContentValues { /** * Adds a value to the set .
* @ param value the data for the value to put */
public void put ( Integer value ) { } }
|
if ( value == null ) { this . compiledStatement . bindNull ( compiledStatementBindIndex ++ ) ; } else { compiledStatement . bindLong ( compiledStatementBindIndex ++ , value ) ; }
|
public class Transform { /** * Convert a { @ link ResultDescriptorDTO } to { @ link ResultDescriptor } .
* @ param rDescriptorDTO
* the DTO to transform
* @ return the corresponding descriptor */
static ResultDescriptor resultDescriptor ( ResultDescriptorDTO rDescriptorDTO ) { } }
|
ResultDescriptor rDescriptor = new ResultDescriptor ( rDescriptorDTO . getInterval ( ) ) ; for ( ResultDescriptorDTO . Datasource ds : rDescriptorDTO . getDatasources ( ) ) { if ( ds . getHeartbeat ( ) != null ) { rDescriptor . datasource ( ds . getLabel ( ) , ds . getSource ( ) , ds . getHeartbeat ( ) , ds . getFunction ( ) ) ; } else { rDescriptor . datasource ( ds . getLabel ( ) , ds . getSource ( ) , ds . getFunction ( ) ) ; } } for ( ResultDescriptorDTO . Expression expr : rDescriptorDTO . getExpressions ( ) ) { rDescriptor . expression ( expr . getLabel ( ) , expr . getExpression ( ) ) ; } rDescriptor . export ( rDescriptorDTO . getExports ( ) ) ; return rDescriptor ;
|
public class AbstractValidate { /** * < p > Validates that the index is within the bounds of the argument character sequence ; otherwise throwing an exception . < / p >
* < pre > Validate . validIndex ( myStr , 2 ) ; < / pre >
* < p > If the character sequence is { @ code null } , then the message of the exception is & quot ; The validated object is null & quot ; . < / p > < p > If the index is invalid , then the message of the exception is
* & quot ; The validated character sequence index is invalid : & quot ; followed by the index . < / p >
* @ param < T >
* the character sequence type
* @ param chars
* the character sequence to check , validated not null by this method
* @ param index
* the index to check
* @ return the validated character sequence ( never { @ code null } for method chaining )
* @ throws NullPointerValidationException
* if the character sequence is { @ code null }
* @ throws IndexOutOfBoundsException
* if the index is invalid
* @ see # validIndex ( CharSequence , int , String , Object . . . ) */
public < T extends CharSequence > T validIndex ( final T chars , final int index ) { } }
|
return validIndex ( chars , index , DEFAULT_VALID_INDEX_CHAR_SEQUENCE_EX_MESSAGE , index ) ;
|
public class FeatureOverlayQuery { /** * Perform a query based upon the map click location and build a info message
* @ param latLng location
* @ param zoom current zoom level
* @ param mapBounds map view bounds
* @ param tolerance tolerance distance
* @ param projection desired geometry projection
* @ return information message on what was clicked , or nil
* @ since 2.0.0 */
public String buildMapClickMessageWithMapBounds ( LatLng latLng , double zoom , BoundingBox mapBounds , double tolerance , Projection projection ) { } }
|
// Build a bounding box to represent the click location
BoundingBox boundingBox = buildClickBoundingBox ( latLng , mapBounds ) ; String message = buildMapClickMessage ( latLng , zoom , boundingBox , tolerance , projection ) ; return message ;
|
public class XmlStreamReaderUtils { /** * Returns the value of an attribute as a byte . If the attribute is empty , this method returns
* the default value provided .
* @ param reader
* < code > XMLStreamReader < / code > that contains attribute values .
* @ param namespace
* String
* @ param localName
* local name of attribute ( the namespace is ignored ) .
* @ param defaultValue
* default value
* @ return value of attribute , or the default value if the attribute is empty . */
public static byte optionalByteAttribute ( final XMLStreamReader reader , final String namespace , final String localName , final byte defaultValue ) { } }
|
final String value = reader . getAttributeValue ( namespace , localName ) ; if ( value != null ) { return Byte . parseByte ( value ) ; } return defaultValue ;
|
public class diff_match_patch { /** * Find the differences between two texts . Assumes that the texts do not
* have any common prefix or suffix .
* @ param text1
* Old string to be diffed .
* @ param text2
* New string to be diffed .
* @ param checklines
* Speedup flag . If false , then don ' t run a line - level diff first
* to identify the changed areas . If true , then run a faster
* slightly less optimal diff .
* @ param deadline
* Time when the diff should be complete by .
* @ return Linked List of Diff objects . */
private LinkedList < Diff > diff_compute ( String text1 , String text2 , boolean checklines , long deadline ) { } }
|
LinkedList < Diff > diffs = new LinkedList < Diff > ( ) ; if ( text1 . length ( ) == 0 ) { // Just add some text ( speedup ) .
diffs . add ( new Diff ( Operation . INSERT , text2 ) ) ; return diffs ; } if ( text2 . length ( ) == 0 ) { // Just delete some text ( speedup ) .
diffs . add ( new Diff ( Operation . DELETE , text1 ) ) ; return diffs ; } String longtext = text1 . length ( ) > text2 . length ( ) ? text1 : text2 ; String shorttext = text1 . length ( ) > text2 . length ( ) ? text2 : text1 ; int i = longtext . indexOf ( shorttext ) ; if ( i != - 1 ) { // Shorter text is inside the longer text ( speedup ) .
Operation op = ( text1 . length ( ) > text2 . length ( ) ) ? Operation . DELETE : Operation . INSERT ; diffs . add ( new Diff ( op , longtext . substring ( 0 , i ) ) ) ; diffs . add ( new Diff ( Operation . EQUAL , shorttext ) ) ; diffs . add ( new Diff ( op , longtext . substring ( i + shorttext . length ( ) ) ) ) ; return diffs ; } if ( shorttext . length ( ) == 1 ) { // Single character string .
// After the previous speedup , the character can ' t be an equality .
diffs . add ( new Diff ( Operation . DELETE , text1 ) ) ; diffs . add ( new Diff ( Operation . INSERT , text2 ) ) ; return diffs ; } // Check to see if the problem can be split in two .
String [ ] hm = diff_halfMatch ( text1 , text2 ) ; if ( hm != null ) { // A half - match was found , sort out the return data .
String text1_a = hm [ 0 ] ; String text1_b = hm [ 1 ] ; String text2_a = hm [ 2 ] ; String text2_b = hm [ 3 ] ; String mid_common = hm [ 4 ] ; // Send both pairs off for separate processing .
LinkedList < Diff > diffs_a = diff_main ( text1_a , text2_a , checklines , deadline ) ; LinkedList < Diff > diffs_b = diff_main ( text1_b , text2_b , checklines , deadline ) ; // Merge the results .
diffs = diffs_a ; diffs . add ( new Diff ( Operation . EQUAL , mid_common ) ) ; diffs . addAll ( diffs_b ) ; return diffs ; } if ( checklines && text1 . length ( ) > 100 && text2 . length ( ) > 100 ) { return diff_lineMode ( text1 , text2 , deadline ) ; } return diff_bisect ( text1 , text2 , deadline ) ;
|
public class ImageIOGreyScale { /** * Returns an < code > Iterator < / code > containing all currently registered < code > ImageWriter < / code > s that
* claim to be able to encode files with the given suffix .
* @ param fileSuffix
* a < code > String < / code > containing a file suffix ( < i > e . g . < / i > , " jpg " or " tiff " ) .
* @ return an < code > Iterator < / code > containing < code > ImageWriter < / code > s .
* @ exception IllegalArgumentException
* if < code > fileSuffix < / code > is < code > null < / code > .
* @ see javax . imageio . spi . ImageWriterSpi # getFileSuffixes */
public static Iterator < ImageWriter > getImageWritersBySuffix ( String fileSuffix ) { } }
|
if ( fileSuffix == null ) { throw new IllegalArgumentException ( "fileSuffix == null!" ) ; } Iterator iter ; // Ensure category is present
try { iter = theRegistry . getServiceProviders ( ImageWriterSpi . class , new ContainsFilter ( writerFileSuffixesMethod , fileSuffix ) , true ) ; } catch ( IllegalArgumentException e ) { return new HashSet ( ) . iterator ( ) ; } return new ImageWriterIterator ( iter ) ;
|
public class AWSCodePipelineClient { /** * Returns information about the state of a pipeline , including the stages and actions .
* < note >
* Values returned in the revisionId and revisionUrl fields indicate the source revision information , such as the
* commit ID , for the current state .
* < / note >
* @ param getPipelineStateRequest
* Represents the input of a GetPipelineState action .
* @ return Result of the GetPipelineState operation returned by the service .
* @ throws ValidationException
* The validation was specified in an invalid format .
* @ throws PipelineNotFoundException
* The specified pipeline was specified in an invalid format or cannot be found .
* @ sample AWSCodePipeline . GetPipelineState
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / codepipeline - 2015-07-09 / GetPipelineState " target = " _ top " > AWS
* API Documentation < / a > */
@ Override public GetPipelineStateResult getPipelineState ( GetPipelineStateRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeGetPipelineState ( request ) ;
|
public class SimpleFormValidator { /** * Adds field to this form without attaching any { @ link FormInputValidator } to it . This can be used when field
* already has added all required validators . */
public void add ( VisValidatableTextField field ) { } }
|
if ( fields . contains ( field , true ) == false ) fields . add ( field ) ; field . addListener ( changeListener ) ; // addListener won ' t allow to add same listener twice
validate ( ) ;
|
public class PolicyEventsInner { /** * Gets OData metadata XML document .
* @ param scope A valid scope , i . e . management group , subscription , resource group , or resource ID . Scope used has no effect on metadata returned .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the String object */
public Observable < ServiceResponse < String > > getMetadataWithServiceResponseAsync ( String scope ) { } }
|
if ( scope == null ) { throw new IllegalArgumentException ( "Parameter scope is required and cannot be null." ) ; } if ( this . client . apiVersion ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiVersion() is required and cannot be null." ) ; } return service . getMetadata ( scope , this . client . apiVersion ( ) , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < String > > > ( ) { @ Override public Observable < ServiceResponse < String > > call ( Response < ResponseBody > response ) { try { ServiceResponse < String > clientResponse = getMetadataDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
|
public class NarrowToWidePtoP_F32 { /** * Apply the transformation
* @ param x x - coordinate of point in pixels . Synthetic narrow FOV camera
* @ param y y - coordinate of point in pixels . Synthetic narrow FOV camera
* @ param out Pixel location of point in wide FOV camera . */
@ Override public void compute ( float x , float y , Point2D_F32 out ) { } }
|
narrowToNorm . compute ( x , y , norm ) ; // Convert from 2D homogenous to 3D
unit . set ( norm . x , norm . y , 1.0f ) ; // Rotate then make it a unit vector
GeometryMath_F32 . mult ( rotateWideToNarrow , unit , unit ) ; float n = unit . norm ( ) ; unit . x /= n ; unit . y /= n ; unit . z /= n ; unitToWide . compute ( unit . x , unit . y , unit . z , out ) ;
|
public class DisambiguationPatternRule { /** * Performs disambiguation on the source sentence .
* @ param sentence { @ link AnalyzedSentence } Sentence to be disambiguated .
* @ return { @ link AnalyzedSentence } Disambiguated sentence ( might be unchanged ) . */
public final AnalyzedSentence replace ( AnalyzedSentence sentence ) throws IOException { } }
|
DisambiguationPatternRuleReplacer replacer = new DisambiguationPatternRuleReplacer ( this ) ; return replacer . replace ( sentence ) ;
|
public class TrConfigurator { /** * Call { @ link TraceComponentChangeListener } s indicating the specified trace
* component was updated .
* @ param tc
* the { @ link TraceComponent } that was updated */
static void traceComponentUpdated ( TraceComponent tc ) { } }
|
for ( TraceComponentChangeListener listener : registeredListeners . get ( ) ) { listener . traceComponentUpdated ( tc ) ; }
|
public class VersionParser { /** * Parses the { @ literal < pre - release > } non - terminal .
* < pre >
* { @ literal
* < pre - release > : : = < dot - separated pre - release identifiers >
* < dot - separated pre - release identifiers > : : = < pre - release identifier >
* | < pre - release identifier > " . " < dot - separated pre - release identifiers >
* < / pre >
* @ return a valid pre - release version object */
private MetadataVersion parsePreRelease ( ) { } }
|
ensureValidLookahead ( CharType . DIGIT , CharType . LETTER , CharType . HYPHEN ) ; List < String > idents = new ArrayList < String > ( ) ; do { idents . add ( preReleaseIdentifier ( ) ) ; if ( chars . positiveLookahead ( CharType . DOT ) ) { consumeNextCharacter ( CharType . DOT ) ; continue ; } break ; } while ( true ) ; return new MetadataVersion ( idents . toArray ( new String [ idents . size ( ) ] ) ) ;
|
public class SecureRandomStrategyFactory { /** * - - - FACTORY METHOD - - - */
@ Override public < T extends Endpoint > Strategy < T > create ( ) { } }
|
return new SecureRandomStrategy < T > ( broker , preferLocal ) ;
|
public class AbstractJSSEProvider { /** * Get the trust manager factory instance using the provided information .
* @ see com . ibm . websphere . ssl . JSSEProvider # getTrustManagerFactoryInstance ( )
* @ param trustMgr
* @ param ctxtProvider
* @ return TrustManagerFactory
* @ throws NoSuchAlgorithmException
* @ throws NoSuchProviderException */
public TrustManagerFactory getTrustManagerFactoryInstance ( String trustMgr , String ctxtProvider ) throws NoSuchAlgorithmException , NoSuchProviderException { } }
|
String mgr = trustMgr ; String provider = ctxtProvider ; if ( mgr . indexOf ( '|' ) != - 1 ) { String [ ] trustManagerArray = mgr . split ( "\\|" ) ; if ( trustManagerArray != null && trustManagerArray . length == 2 ) { mgr = trustManagerArray [ 0 ] ; provider = trustManagerArray [ 1 ] ; } } TrustManagerFactory rc = TrustManagerFactory . getInstance ( mgr , provider ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "getTrustManagerFactory.getInstance(" + mgr + ", " + provider + ")" + rc ) ; return rc ;
|
public class CopyToModule { /** * Get path to root map
* @ param traceFilename absolute input file
* @ param inputMap absolute path to start file
* @ return path to base directory , { @ code null } if not available */
public static File getPathtoRootmap ( final URI traceFilename , final URI inputMap ) { } }
|
assert traceFilename . isAbsolute ( ) ; assert inputMap . isAbsolute ( ) ; return toFile ( getRelativePath ( traceFilename , inputMap ) ) . getParentFile ( ) ;
|
public class ClassHelper {
    /**
     * Determines the method with the specified signature via reflection look-up.
     *
     * @param clazz the java class to search in (may be {@code null})
     * @param methodName the method's name (may be {@code null})
     * @param params the parameter types
     * @return the method object or {@code null} if no matching method was found
     */
    public static Method getMethod(Class clazz, String methodName, Class[] params) {
        if (clazz == null || methodName == null) {
            // Preserve the original "null on any failure" contract explicitly
            // instead of relying on a swallowed NullPointerException.
            return null;
        }
        try {
            return clazz.getMethod(methodName, params);
        } catch (NoSuchMethodException | SecurityException ignored) {
            // No matching public method, or reflective access denied: report "not found".
        }
        return null;
    }
}
|
public class Clients { /** * Return the right client according to the specific name .
* @ param name name of the client
* @ return the right client */
public Optional < Client > findClient ( final String name ) { } }
|
CommonHelper . assertNotBlank ( "name" , name ) ; init ( ) ; final String lowerTrimmedName = name . toLowerCase ( ) . trim ( ) ; final Client client = _clients . get ( lowerTrimmedName ) ; if ( client != null ) { return Optional . of ( client ) ; } LOGGER . debug ( "No client found for name: {}" , name ) ; return Optional . empty ( ) ;
|
public class TCPChannelFactory { /** * Declared abstract in the ChannelFactoryImpl class .
* @ param channelData
* @ return Channel
* @ throws ChannelException */
protected Channel createChannel ( ChannelData channelData ) throws ChannelException { } }
|
// Creates the TCP channel for the supplied framework configuration. When async IO is
// enabled, an override comm class is configured, and NIO-only mode is off, the override
// class is instantiated reflectively and used; any instantiation failure is debug-traced
// and the code falls back to the default NioTCPChannel. Each setup() call may return a
// ChannelTermination, recorded in terminationList keyed by the implementation name.
// NOTE(review): failures here are intentionally swallowed — fallback appears to be the
// designed recovery path; confirm before tightening.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . entry ( tc , "createChannel" ) ; } TCPChannelConfiguration newCC = new TCPChannelConfiguration ( channelData ) ; TCPChannel channel = null ; boolean isOverrideClassUsed = false ; if ( ( ! newCC . isNIOOnly ( ) ) && ( commClass != null ) && ( ChannelFrameworkImpl . getRef ( ) . getAsyncIOEnabled ( ) ) ) { try { channel = ( TCPChannel ) commClass . newInstance ( ) ; ChannelTermination ct = channel . setup ( channelData , newCC , this ) ; if ( ct != null ) { this . terminationList . put ( commClassName , ct ) ; } isOverrideClassUsed = true ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "using CommClass: " + commClass ) ; } } catch ( Exception e ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Exception trying to instantiate CommClass: " + e ) ; } } } if ( ! isOverrideClassUsed ) { channel = new NioTCPChannel ( ) ; ChannelTermination ct = channel . setup ( channelData , newCC , this ) ; if ( ct != null ) { this . terminationList . put ( "NioTCPChannel" , ct ) ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . exit ( tc , "createChannel" ) ; } return channel ;
|
public class WebSocketNode { /** * Gets detailed connection information for online users . < br >
* Map . key is the SNCP node address ( a null key indicates a non - distributed deployment )
* Map . value is the list of the user client ' s IP addresses
* @ param userid Serializable
* @ return map from node address to client addresses */
public CompletableFuture < Map < InetSocketAddress , List < String > > > getRpcNodeWebSocketAddresses ( final Serializable userid ) { } }
|
// Resolves the SNCP node addresses hosting websocket sessions for this user, then for each
// node asynchronously fetches the client address list (getWebSocketAddresses) and merges
// all per-node single-entry maps into one result via thenCombine. A null/empty node
// collection short-circuits to an empty map.
CompletableFuture < Collection < InetSocketAddress > > sncpFuture = getRpcNodeAddresses ( userid ) ; return sncpFuture . thenCompose ( ( Collection < InetSocketAddress > addrs ) -> { if ( logger . isLoggable ( Level . FINEST ) ) logger . finest ( "websocket found userid:" + userid + " on " + addrs ) ; if ( addrs == null || addrs . isEmpty ( ) ) return CompletableFuture . completedFuture ( new HashMap < > ( ) ) ; CompletableFuture < Map < InetSocketAddress , List < String > > > future = null ; for ( final InetSocketAddress nodeAddress : addrs ) { CompletableFuture < Map < InetSocketAddress , List < String > > > mapFuture = getWebSocketAddresses ( nodeAddress , userid ) . thenCompose ( ( List < String > list ) -> CompletableFuture . completedFuture ( Utility . ofMap ( nodeAddress , list ) ) ) ; future = future == null ? mapFuture : future . thenCombine ( mapFuture , ( a , b ) -> Utility . merge ( a , b ) ) ; } return future == null ? CompletableFuture . completedFuture ( new HashMap < > ( ) ) : future ; } ) ;
|
public class ProviderManager { /** * Returns an unmodifiable collection of all PacketExtensionProvider instances . Each object
* in the collection will either be a PacketExtensionProvider instance , or a Class object
* that implements the PacketExtensionProvider interface .
* @ return all PacketExtensionProvider instances . */
public static List < ExtensionElementProvider < ExtensionElement > > getExtensionProviders ( ) { } }
|
List < ExtensionElementProvider < ExtensionElement > > providers = new ArrayList < > ( extensionProviders . size ( ) ) ; providers . addAll ( extensionProviders . values ( ) ) ; return providers ;
|
public class ServletHttpRequest { public String getLocalName ( ) { } }
|
HttpConnection connection = _httpRequest . getHttpConnection ( ) ; if ( connection != null ) return connection . getServerName ( ) ; return null ;
|
public class DependencyContainer { /** * Sets the check modification check interval in milliseconds .
* Negative values mean never check . 0 means always check .
* @ param checkInterval how often the dependency should be checked */
public void setCheckInterval ( long checkInterval ) { } }
|
if ( checkInterval < 0 || checkInterval > Long . MAX_VALUE / 2 ) _checkInterval = Long . MAX_VALUE / 2 ; else _checkInterval = checkInterval ; _checkExpiresTime = 0 ;
|
public class StreamUtility { /** * Prints an XML - appropriate encoding of the given range of characters to
* the given Writer .
* @ param in
* The char buffer to read from .
* @ param start
* The starting index .
* @ param length
* The number of characters in the range .
* @ param out
* The Appendable to write to . */
public static void enc ( char [ ] in , int start , int length , StringBuffer out ) { } }
|
for ( int i = start ; i < length + start ; i ++ ) { enc ( in [ i ] , out ) ; }
|
public class FontUtils { /** * Gets the named font with the specified style , correctly scaled
* @ param name
* @ param style
* @ return the named font with the specified style , correctly scaled */
public static Font getFont ( String name , int style ) { } }
|
return new Font ( name , style , getDefaultFont ( ) . getSize ( ) ) ;
|
public class MalisisGui { /** * Called when a key is pressed on the keyboard . */
@ Override protected void keyTyped ( char keyChar , int keyCode ) { } }
|
// Dispatch order: registered key listeners first; if none consumed the key, the focused
// component, then the hovered component (each skipped when already registered as a
// listener). The GUI-close key closes this screen. In a dev (non-obfuscated) environment
// with Ctrl held: R rebuilds the GUI from scratch, D toggles debug overlay, P/S toggle
// position/size caching. All exceptions are reported in-game and printed to stdout
// rather than propagated, so a listener bug cannot crash the client.
try { boolean ret = false ; for ( IKeyListener listener : keyListeners ) ret |= listener . onKeyTyped ( keyChar , keyCode ) ; if ( ret ) return ; if ( focusedComponent != null && ! keyListeners . contains ( focusedComponent ) && focusedComponent . onKeyTyped ( keyChar , keyCode ) ) return ; if ( hoveredComponent != null && ! keyListeners . contains ( hoveredComponent ) && hoveredComponent . onKeyTyped ( keyChar , keyCode ) ) return ; if ( isGuiCloseKey ( keyCode ) && mc . currentScreen == this ) close ( ) ; if ( ! MalisisCore . isObfEnv && isCtrlKeyDown ( ) && ( current ( ) != null || isOverlay ) ) { if ( keyCode == Keyboard . KEY_R ) { clearScreen ( ) ; setResolution ( ) ; setHoveredComponent ( null , true ) ; setFocusedComponent ( null , true ) ; constructed = false ; doConstruct ( ) ; } if ( keyCode == Keyboard . KEY_D ) { debug = ! debug ; debugComponent . setEnabled ( debug ) ; } if ( keyCode == Keyboard . KEY_P ) { Position . CACHED = ! Position . CACHED ; } if ( keyCode == Keyboard . KEY_S ) { Size . CACHED = ! Size . CACHED ; } } } catch ( Exception e ) { MalisisCore . message ( "A problem occured while handling key typed for " + e . getClass ( ) . getSimpleName ( ) + ": " + e . getMessage ( ) ) ; e . printStackTrace ( new PrintStream ( new FileOutputStream ( FileDescriptor . out ) ) ) ; }
|
public class DateExtensions { /** * Returns a map with all date patterns from the Interface DatePatterns . As key is the name from
* the pattern .
* @ return Returns a Map with all date patterns from the Interface DatePatterns .
* @ throws IllegalAccessException
* is thrown when an application tries to reflectively create an instance */
public static Map < String , Object > getAllDatePatterns ( ) throws IllegalAccessException { } }
|
final Field [ ] fields = DatePatterns . class . getFields ( ) ; final Map < String , Object > patterns = new HashMap < > ( fields . length ) ; for ( final Field field : fields ) { patterns . put ( field . getName ( ) , field . get ( field . getName ( ) ) ) ; } return patterns ;
|
public class PrincipalUserSerializer { /** * ~ Methods * * * * * */
@ Override public void serialize ( PrincipalUser value , JsonGenerator jgen , SerializerProvider provider ) throws IOException , JsonProcessingException { } }
|
jgen . writeStartObject ( ) ; jgen . writeStringField ( "id" , value . getId ( ) . toString ( ) ) ; jgen . writeStringField ( "createdBy" , value . getCreatedBy ( ) == null ? null : value . getCreatedBy ( ) . getUserName ( ) ) ; jgen . writeNumberField ( "createdDate" , value . getCreatedDate ( ) == null ? null : value . getCreatedDate ( ) . getTime ( ) ) ; jgen . writeStringField ( "modifiedBy" , value . getModifiedBy ( ) == null ? null : value . getModifiedBy ( ) . getUserName ( ) ) ; jgen . writeNumberField ( "modifiedDate" , value . getModifiedDate ( ) == null ? null : value . getModifiedDate ( ) . getTime ( ) ) ; jgen . writeStringField ( "userName" , value . getUserName ( ) ) ; jgen . writeStringField ( "email" , value . getEmail ( ) ) ; jgen . writeBooleanField ( "privileged" , value . isPrivileged ( ) ) ; jgen . writeEndObject ( ) ;
|
public class MySQLMetadataDAO { /** * Query persistence for all defined { @ link TaskDef } data , and cache it in { @ link # taskDefCache } . */
private void refreshTaskDefs ( ) { } }
|
// Loads all task definitions in one transaction into a local map, then atomically swaps
// the cache contents under the taskDefCache lock (clear + putAll), so readers never see a
// partially-populated cache. Failures are recorded via Monitors and logged — never thrown —
// leaving the previous cache contents intact.
try { withTransaction ( tx -> { Map < String , TaskDef > map = new HashMap < > ( ) ; findAllTaskDefs ( tx ) . forEach ( taskDef -> map . put ( taskDef . getName ( ) , taskDef ) ) ; synchronized ( taskDefCache ) { taskDefCache . clear ( ) ; taskDefCache . putAll ( map ) ; } if ( logger . isTraceEnabled ( ) ) { logger . trace ( "Refreshed {} TaskDefs" , taskDefCache . size ( ) ) ; } } ) ; } catch ( Exception e ) { Monitors . error ( className , "refreshTaskDefs" ) ; logger . error ( "refresh TaskDefs failed " , e ) ; }
|
public class CommerceShipmentPersistenceImpl { /** * Caches the commerce shipment in the entity cache if it is enabled .
* @ param commerceShipment the commerce shipment */
@ Override public void cacheResult ( CommerceShipment commerceShipment ) { } }
|
// Stores the shipment in the entity cache keyed by its primary key, then clears the
// model's "original values" snapshot so subsequent dirty-checking compares against the
// now-cached state.
entityCache . putResult ( CommerceShipmentModelImpl . ENTITY_CACHE_ENABLED , CommerceShipmentImpl . class , commerceShipment . getPrimaryKey ( ) , commerceShipment ) ; commerceShipment . resetOriginalValues ( ) ;
|
public class StatementCacheKey {
    /**
     * Packs the bytes of the given array into a single long, little-endian:
     * byte 0 becomes the least significant byte.
     *
     * @param b an array of 8 bytes; must not be null
     * @return a unique long value representing the first 8 bytes of the array
     */
    public static final long toLong(byte[] b) {
        long packed = 0L;
        for (int i = b.length - 1; i >= 0; i--) {
            // Mask to avoid sign extension, then shift into position i*8.
            packed |= (b[i] & 0xffL) << (i << 3);
        }
        return packed;
    }
}
|
public class GreenPepperPluginListener { /** * { @ inheritDoc } */
public void handleEvent ( Event event ) { } }
|
if ( event instanceof PluginInstallEvent || event instanceof PluginEnableEvent ) { try { getServerConfigurationActivator ( ) . startup ( false ) ; } catch ( GreenPepperServerException ex ) { log . error ( "Post-install : startup failed" , ex ) ; throw new RuntimeException ( "Post-install : startup failed" , ex ) ; } }
|
public class sslcipher { /** * Use this API to fetch sslcipher resources of given names . */
public static sslcipher [ ] get ( nitro_service service , String ciphergroupname [ ] ) throws Exception { } }
|
if ( ciphergroupname != null && ciphergroupname . length > 0 ) { sslcipher response [ ] = new sslcipher [ ciphergroupname . length ] ; sslcipher obj [ ] = new sslcipher [ ciphergroupname . length ] ; for ( int i = 0 ; i < ciphergroupname . length ; i ++ ) { obj [ i ] = new sslcipher ( ) ; obj [ i ] . set_ciphergroupname ( ciphergroupname [ i ] ) ; response [ i ] = ( sslcipher ) obj [ i ] . get_resource ( service ) ; } return response ; } return null ;
|
public class GuaranteedTargetStream { /** * Utility method to send Ack up to completedPrefix */
private void sendAck ( ) { } }
|
// Reads the stream's completed prefix and sends a single cumulative ACK for everything up
// to it back to the sending ME, recording the last acked tick on success. An
// SIResourceException is FFDC'd and trace-logged but not rethrown — the ACK will be
// retried on a later call. NOTE(review): the swallowed exception is flagged by the
// original TODO below; confirm retry-on-next-call is the intended recovery.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "sendAck" ) ; long completedPrefix = oststream . getCompletedPrefix ( ) ; // Now that all messages are delivered we need to send Acks
// sendAck back to sending ME ( RemoteMessageTransmitter )
try { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "sendAck up to: " + completedPrefix ) ; upControl . sendAckMessage ( streamSet . getRemoteMEUuid ( ) , streamSet . getDestUuid ( ) , streamSet . getBusUuid ( ) , completedPrefix , priority , reliability , streamSet . getStreamID ( ) , true ) ; this . _lastAckedTick = completedPrefix ; } catch ( SIResourceException e ) { // FFDC
FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.gd.GuaranteedTargetStream.sendAck" , "1:2019:1.110" , this ) ; SibTr . exception ( tc , e ) ; // TODO don ' t know what to do here - rethrow ? !
} if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "sendAck" ) ;
|
public class NodeStack { /** * Push a node and associated state onto the stack . */
public void push ( int state , GBSNode node ) { } }
|
_cidx ++ ; _idx = _cidx ; if ( _cidx > GBSTree . maxDepth ) throw new OptimisticDepthException ( "maxDepth (" + GBSTree . maxDepth + ") exceeded. Depth is " + _cidx + "." ) ; // _ idx + + ;
// _ cidx = _ idx ;
// _ endIndex = _ idx ;
_state [ _cidx ] = state ; _node [ _cidx ] = node ; if ( index ( ) > _maxIdx ) _maxIdx = index ( ) ;
|
public class IncidentUtil { /** * Makes a V2 incident object and triggers ir , effectively reporting the
* given exception to GS and possibly to crashmanager
* @ param session SFSession object to talk to GS through
* @ param exc the Throwable we should report
* @ param jobId jobId that failed
* @ param requestId requestId that failed
* @ return the given Throwable object */
public static Throwable generateIncidentV2WithException ( SFSession session , Throwable exc , String jobId , String requestId ) { } }
|
new Incident ( session , exc , jobId , requestId ) . trigger ( ) ; return exc ;
|
public class SeleniumSpec { /** * Verifies that a webelement previously found { @ code isSelected }
* @ param index
* @ param isSelected */
@ Then ( "^the element on index '(\\d+?)' (IS|IS NOT) selected$" ) public void assertSeleniumIsSelected ( Integer index , Boolean isSelected ) { } }
|
// First guards that enough elements were located by an earlier step to cover the index,
// then compares the indexed element's isSelected() state against the expected flag
// (true for "IS", false for "IS NOT" — mapped by the step-definition framework).
assertThat ( this . commonspec , commonspec . getPreviousWebElements ( ) ) . as ( "There are less found elements than required" ) . hasAtLeast ( index ) ; assertThat ( this . commonspec , commonspec . getPreviousWebElements ( ) . getPreviousWebElements ( ) . get ( index ) . isSelected ( ) ) . as ( "Unexpected element selected property" ) . isEqualTo ( isSelected ) ;
|
public class ProtoTruthMessageDifferencer { /** * If there is no match , returns null . */
@ NullableDecl private RepeatedField . PairResult findMatchingPairResult ( Deque < Integer > actualIndices , List < ? > actualValues , int expectedIndex , Object expectedValue , boolean excludeNonRecursive , FieldDescriptor fieldDescriptor , FluentEqualityConfig config ) { } }
|
// Scans the not-yet-consumed actual indices in deque order; the first element that
// compares as matched against the expected value is removed from the deque (so it cannot
// be paired twice) and its PairResult returned. Returns null when nothing matches.
Iterator < Integer > actualIndexIter = actualIndices . iterator ( ) ; while ( actualIndexIter . hasNext ( ) ) { int actualIndex = actualIndexIter . next ( ) ; RepeatedField . PairResult pairResult = compareRepeatedFieldElementPair ( actualValues . get ( actualIndex ) , expectedValue , excludeNonRecursive , fieldDescriptor , actualIndex , expectedIndex , config ) ; if ( pairResult . isMatched ( ) ) { actualIndexIter . remove ( ) ; return pairResult ; } } return null ;
|
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public IfcReflectanceMethodEnum createIfcReflectanceMethodEnumFromString ( EDataType eDataType , String initialValue ) { } }
|
IfcReflectanceMethodEnum result = IfcReflectanceMethodEnum . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ;
|
public class Utils { /** * Gets the colorAccent from the current context , if possible / available
* @ param context The context to use as reference for the color
* @ return the accent color of the current context */
public static int getAccentColorFromThemeIfAvailable ( Context context ) { } }
|
// Three-step resolution: (1) on API 21+ use the framework android.R.attr.colorAccent;
// (2) otherwise look up a support-library "colorAccent" attr by name in the app package;
// (3) fall back to the library's own mdtp_accent_color resource.
TypedValue typedValue = new TypedValue ( ) ; // First , try the android : colorAccent
if ( Build . VERSION . SDK_INT >= 21 ) { context . getTheme ( ) . resolveAttribute ( android . R . attr . colorAccent , typedValue , true ) ; return typedValue . data ; } // Next , try colorAccent from support lib
int colorAccentResId = context . getResources ( ) . getIdentifier ( "colorAccent" , "attr" , context . getPackageName ( ) ) ; if ( colorAccentResId != 0 && context . getTheme ( ) . resolveAttribute ( colorAccentResId , typedValue , true ) ) { return typedValue . data ; } // Return the value in mdtp _ accent _ color
return ContextCompat . getColor ( context , R . color . mdtp_accent_color ) ;
|
public class AwsSecurityFindingFilters { /** * The source port of network - related information about a finding .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setNetworkSourcePort ( java . util . Collection ) } or { @ link # withNetworkSourcePort ( java . util . Collection ) } if
* you want to override the existing values .
* @ param networkSourcePort
* The source port of network - related information about a finding .
* @ return Returns a reference to this object so that method calls can be chained together . */
public AwsSecurityFindingFilters withNetworkSourcePort ( NumberFilter ... networkSourcePort ) { } }
|
if ( this . networkSourcePort == null ) { setNetworkSourcePort ( new java . util . ArrayList < NumberFilter > ( networkSourcePort . length ) ) ; } for ( NumberFilter ele : networkSourcePort ) { this . networkSourcePort . add ( ele ) ; } return this ;
|
public class ExpressionBuilderImpl { /** * Create a reference to " super " object or to the super type .
* @ return the reference . */
public XFeatureCall createReferenceToSuper ( ) { } }
|
// Resolves the JVM type associated with the enclosing Xtend type declaration, then builds
// an XFeatureCall whose feature is that type's extended (super) class. When the associated
// element is not a JvmDeclaredType, the type is re-resolved by qualified name and its
// extended class taken instead. Returns null when no supertype can be determined.
// NOTE(review): getExtendedClass() is assumed non-null on both paths — a type with no
// explicit supertype may NPE here; confirm upstream always materializes Object as extended
// class before tightening.
final XExpression expr = getXExpression ( ) ; XtendTypeDeclaration type = EcoreUtil2 . getContainerOfType ( expr , XtendTypeDeclaration . class ) ; JvmType jvmObject = getAssociatedElement ( JvmType . class , type , expr . eResource ( ) ) ; final XFeatureCall superFeature = XbaseFactory . eINSTANCE . createXFeatureCall ( ) ; JvmIdentifiableElement feature ; if ( jvmObject instanceof JvmDeclaredType ) { feature = ( ( JvmDeclaredType ) jvmObject ) . getExtendedClass ( ) . getType ( ) ; } else { feature = findType ( expr , getQualifiedName ( type ) ) . getType ( ) ; if ( feature instanceof JvmDeclaredType ) { feature = ( ( JvmDeclaredType ) feature ) . getExtendedClass ( ) . getType ( ) ; } else { feature = null ; } } if ( feature == null ) { return null ; } superFeature . setFeature ( feature ) ; return superFeature ;
|
public class SymbolManager { /** * Set the TextOptional property
* If true , icons will display without their corresponding text when the text collides with other symbols and the icon does not .
* @ param value property wrapper value around Boolean */
public void setTextOptional ( Boolean value ) { } }
|
PropertyValue propertyValue = textOptional ( value ) ; constantPropertyUsageMap . put ( PROPERTY_TEXT_OPTIONAL , propertyValue ) ; layer . setProperties ( propertyValue ) ;
|
public class JMXSecurityMBeanServer { /** * Insert the JMX security filter upon activation . This will only
* happen if we have both the MBeanServerPipeline and the SecurityService .
* @ param cc */
protected synchronized void activate ( ComponentContext cc ) { } }
|
// Activates the declarative-service references for the pipeline and the security service,
// then installs the JMX security filter into the MBeanServerPipeline. Synchronized to
// serialize activation against concurrent deactivation/modification callbacks.
pipelineRef . activate ( cc ) ; securityServiceRef . activate ( cc ) ; insertJMXSecurityFilter ( ) ;
|
public class HAProxyMessageDecoder { /** * Create a frame out of the { @ link ByteBuf } and return it .
* Based on code from { @ link LineBasedFrameDecoder # decode ( ChannelHandlerContext , ByteBuf ) } .
* @ param ctx the { @ link ChannelHandlerContext } which this { @ link HAProxyMessageDecoder } belongs to
* @ param buffer the { @ link ByteBuf } from which to read data
* @ return frame the { @ link ByteBuf } which represent the frame or { @ code null } if no frame could
* be created */
private ByteBuf decodeStruct ( ChannelHandlerContext ctx , ByteBuf buffer ) throws Exception { } }
|
// Frame extraction with over-limit protection. Normal mode: a complete header within
// v2MaxHeaderSize is returned as a slice; an over-long complete header fails fast; an
// incomplete buffer already past the limit switches to discard mode. Discard mode: skip
// bytes until the end of the oversized header is found, then reset state; discard mode
// always returns null (no frame).
final int eoh = findEndOfHeader ( buffer ) ; if ( ! discarding ) { if ( eoh >= 0 ) { final int length = eoh - buffer . readerIndex ( ) ; if ( length > v2MaxHeaderSize ) { buffer . readerIndex ( eoh ) ; failOverLimit ( ctx , length ) ; return null ; } return buffer . readSlice ( length ) ; } else { final int length = buffer . readableBytes ( ) ; if ( length > v2MaxHeaderSize ) { discardedBytes = length ; buffer . skipBytes ( length ) ; discarding = true ; failOverLimit ( ctx , "over " + discardedBytes ) ; } return null ; } } else { if ( eoh >= 0 ) { buffer . readerIndex ( eoh ) ; discardedBytes = 0 ; discarding = false ; } else { discardedBytes = buffer . readableBytes ( ) ; buffer . skipBytes ( discardedBytes ) ; } return null ; }
|
public class TaggingService { /** * The main method .
* @ param args the arguments */
public static void main ( String [ ] args ) { } }
|
ServiceOption option = new ServiceOption ( ) ; CmdLineParser parser = new CmdLineParser ( option ) ; if ( args . length == 0 ) { System . out . println ( "TaggingService [options...] [arguments..]" ) ; parser . printUsage ( System . out ) ; return ; } new TaggingService ( option ) . run ( ) ;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.