signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ElasticSearchDruidDataSource {

    /**
     * Discards a connection outright instead of returning it to the pool.
     * The physical connection is closed and the pool's bookkeeping counters
     * are updated under the pool lock.
     *
     * @param realConnection the physical connection to discard
     */
    public void discardConnection(Connection realConnection) {
        // Close outside the pool lock so a slow/hanging close cannot block
        // other threads that need the lock.
        JdbcUtils.close(realConnection);
        lock.lock();
        try {
            activeCount--;
            discardCount++;
            if (activeCount <= 0) {
                // No active connections remain; wake any waiter expecting an
                // empty pool so it can replenish.
                emptySignal();
            }
        } finally {
            lock.unlock();
        }
    }
}
public class Util {

    /**
     * Checks whether a {@link NodeList} consists solely of text nodes.
     *
     * @param list the node list to inspect
     * @return {@code true} if every node is a text node (or the list is
     *         empty), {@code false} if any element or other node type appears
     */
    static final boolean textNodesOnly(NodeList list) {
        // Walk the list back to front; order is irrelevant for an all-of check.
        int remaining = list.getLength();
        while (--remaining >= 0) {
            if (list.item(remaining).getNodeType() != Node.TEXT_NODE) {
                return false;
            }
        }
        return true;
    }
}
public class SparkComputationGraph { /** * DataSet version of { @ link # scoreExamples ( JavaPairRDD , boolean , int ) } */ public < K > JavaPairRDD < K , Double > scoreExamples ( JavaPairRDD < K , DataSet > data , boolean includeRegularizationTerms , int batchSize ) { } }
return scoreExamplesMultiDataSet ( data . mapToPair ( new PairDataSetToMultiDataSetFn < K > ( ) ) , includeRegularizationTerms , batchSize ) ;
public class SNISSLExplorer {

    /**
     * Parses the body of the TLS "server_name" (SNI) hello extension.
     *
     * Wire format (RFC 6066):
     * <pre>
     * struct {
     *     NameType name_type;
     *     select (name_type) { case host_name: HostName; } name;
     * } ServerName;
     * enum { host_name(0), (255) } NameType;
     * opaque HostName&lt;1..2^16-1&gt;;
     * struct { ServerName server_name_list&lt;1..2^16-1&gt; } ServerNameList;
     * </pre>
     *
     * @param input  buffer positioned at the start of the extension body
     * @param extLen declared length of the extension body in bytes
     * @return immutable list of parsed server names, one per name type
     * @throws SSLException if the extension is malformed, truncated, empty in
     *         a ClientHello, or contains a duplicated name type
     */
    private static List<SNIServerName> exploreSNIExt(ByteBuffer input, int extLen) throws SSLException {
        // Keyed by name type: LinkedHashMap preserves wire order and lets a
        // non-null put() return value flag a duplicated type.
        Map<Integer, SNIServerName> sniMap = new LinkedHashMap<>();
        // Bytes of the extension body not yet consumed.
        int remains = extLen;
        if (extLen >= 2) {
            // "server_name" extension in ClientHello
            int listLen = getInt16(input);  // length of server_name_list
            // The list length must account for exactly the rest of the body.
            if (listLen == 0 || listLen + 2 != extLen) {
                throw UndertowMessages.MESSAGES.invalidTlsExt();
            }
            remains -= 2;  // 0x02: the two-byte length field of server_name_list
            while (remains > 0) {
                int code = getInt8(input);   // name_type
                int snLen = getInt16(input); // length field of the server name
                if (snLen > remains) {
                    throw UndertowMessages.MESSAGES.notEnoughData();
                }
                byte[] encoded = new byte[snLen];
                input.get(encoded);
                SNIServerName serverName;
                switch (code) {
                    case StandardConstants.SNI_HOST_NAME:
                        if (encoded.length == 0) {
                            throw UndertowMessages.MESSAGES.emptyHostNameSni();
                        }
                        serverName = new SNIHostName(encoded);
                        break;
                    default:
                        // Unrecognized name types are preserved opaquely.
                        serverName = new UnknownServerName(code, encoded);
                }
                // check for duplicated server name type
                if (sniMap.put(serverName.getType(), serverName) != null) {
                    throw UndertowMessages.MESSAGES.duplicatedSniServerName(serverName.getType());
                }
                // Consumed: NameType (1 byte) + length field (2 bytes) + name bytes.
                remains -= encoded.length + 3;
            }
        } else if (extLen == 0) {
            // "server_name" extension in ServerHello carries no data; not valid here.
            throw UndertowMessages.MESSAGES.invalidTlsExt();
        }
        // Any leftover (or overshoot) means the declared lengths were inconsistent.
        if (remains != 0) {
            throw UndertowMessages.MESSAGES.invalidTlsExt();
        }
        return Collections.unmodifiableList(new ArrayList<>(sniMap.values()));
    }
}
public class NetworkProfilesInner { /** * Gets all network profiles in a resource group . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; NetworkProfileInner & gt ; object */ public Observable < Page < NetworkProfileInner > > listByResourceGroupNextAsync ( final String nextPageLink ) { } }
return listByResourceGroupNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < NetworkProfileInner > > , Page < NetworkProfileInner > > ( ) { @ Override public Page < NetworkProfileInner > call ( ServiceResponse < Page < NetworkProfileInner > > response ) { return response . body ( ) ; } } ) ;
public class Customization { /** * Return true if actual value matches expected value using this * Customization ' s comparator . The equal method used for comparison depends * on type of comparator . * @ param prefix * JSON path of the JSON item being tested ( only used if * comparator is a LocationAwareValueMatcher ) * @ param actual * JSON value being tested * @ param expected * expected JSON value * @ param result * JSONCompareResult to which match failure may be passed ( only * used if comparator is a LocationAwareValueMatcher ) * @ return true if expected and actual equal or any difference has already * been passed to specified result instance , false otherwise . * @ throws ValueMatcherException * if expected and actual values not equal and ValueMatcher * needs to override default comparison failure message that * would be generated if this method returned false . */ public boolean matches ( String prefix , Object actual , Object expected , JSONCompareResult result ) throws ValueMatcherException { } }
if ( comparator instanceof LocationAwareValueMatcher ) { return ( ( LocationAwareValueMatcher < Object > ) comparator ) . equal ( prefix , actual , expected , result ) ; } return comparator . equal ( actual , expected ) ;
public class AdHoc_RW_SP { /** * System procedure run hook . * Use the base class implementation . * @ param ctx execution context * @ param partitionParam serialized partition parameter * @ param partitionParamType type of the partition parameter used to deserialize it * @ param serializedBatchData serialized data needed for adhoc * @ return results as VoltTable array */ public VoltTable [ ] run ( SystemProcedureExecutionContext ctx , byte [ ] partitionParam , byte partitionParamType , byte [ ] serializedBatchData ) { } }
return runAdHoc ( ctx , serializedBatchData ) ;
public class Help {

    /**
     * <pre>
     * URL(s) pointing to additional information on handling the current error.
     * </pre>
     *
     * <code>repeated .google.rpc.Help.Link links = 1;</code>
     *
     * @return the internal list backing the repeated {@code links} field
     *         (protobuf-generated accessor; the list is exposed directly)
     */
    public java.util.List<com.google.rpc.Help.Link> getLinksList() {
        return links_;
    }
}
public class ShardingDataSourceNames { /** * Get raw master data source name . * @ param dataSourceName data source name * @ return raw master data source name */ public String getRawMasterDataSourceName ( final String dataSourceName ) { } }
for ( MasterSlaveRuleConfiguration each : shardingRuleConfig . getMasterSlaveRuleConfigs ( ) ) { if ( each . getName ( ) . equals ( dataSourceName ) ) { return each . getMasterDataSourceName ( ) ; } } return dataSourceName ;
public class AppServiceCertificateOrdersInner { /** * Verify domain ownership for this certificate order . * Verify domain ownership for this certificate order . * @ param resourceGroupName Name of the resource group to which the resource belongs . * @ param certificateOrderName Name of the certificate order . * @ param siteSealRequest Site seal request . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws DefaultErrorResponseException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the SiteSealInner object if successful . */ public SiteSealInner retrieveSiteSeal ( String resourceGroupName , String certificateOrderName , SiteSealRequest siteSealRequest ) { } }
return retrieveSiteSealWithServiceResponseAsync ( resourceGroupName , certificateOrderName , siteSealRequest ) . toBlocking ( ) . single ( ) . body ( ) ;
public class HelpTopicNode { /** * Inserts a child node at the specified position . * @ param node Child node to insert . * @ param index Insertion position ( - 1 to append ) . */ public void addChild ( HelpTopicNode node , int index ) { } }
node . detach ( ) ; node . parent = this ; if ( index < 0 ) { children . add ( node ) ; } else { children . add ( index , node ) ; }
public class ElementSelectors { /** * Accepts two elements if exactly on of the given ElementSelectors does . */ public static ElementSelector xor ( final ElementSelector es1 , final ElementSelector es2 ) { } }
if ( es1 == null || es2 == null ) { throw new IllegalArgumentException ( SELECTORS_MUST_NOT_BE_NULL ) ; } return new ElementSelector ( ) { @ Override public boolean canBeCompared ( Element controlElement , Element testElement ) { return es1 . canBeCompared ( controlElement , testElement ) ^ es2 . canBeCompared ( controlElement , testElement ) ; } } ;
public class TaskClient { /** * Retrieve information about the task * @ param taskId ID of the task * @ return Task details */ public Task getTaskDetails ( String taskId ) { } }
Preconditions . checkArgument ( StringUtils . isNotBlank ( taskId ) , "Task id cannot be blank" ) ; return protoMapper . fromProto ( stub . getTask ( TaskServicePb . GetTaskRequest . newBuilder ( ) . setTaskId ( taskId ) . build ( ) ) . getTask ( ) ) ;
public class UpdateThingGroupsForThingRequest { /** * The groups to which the thing will be added . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setThingGroupsToAdd ( java . util . Collection ) } or { @ link # withThingGroupsToAdd ( java . util . Collection ) } if you * want to override the existing values . * @ param thingGroupsToAdd * The groups to which the thing will be added . * @ return Returns a reference to this object so that method calls can be chained together . */ public UpdateThingGroupsForThingRequest withThingGroupsToAdd ( String ... thingGroupsToAdd ) { } }
if ( this . thingGroupsToAdd == null ) { setThingGroupsToAdd ( new java . util . ArrayList < String > ( thingGroupsToAdd . length ) ) ; } for ( String ele : thingGroupsToAdd ) { this . thingGroupsToAdd . add ( ele ) ; } return this ;
public class BobblePath {

    /**
     * Advances the bobble animation for one tick.
     * Documentation inherited from the Path interface.
     *
     * @param pable     the entity being moved along this path
     * @param tickStamp the current tick timestamp
     * @return true if the entity's position changed this tick, false otherwise
     */
    public boolean tick(Pathable pable, long tickStamp) {
        // see if we need to stop: snap back to the origin and finish the path
        if (_stopTime <= tickStamp) {
            boolean updated = updatePositionTo(pable, _sx, _sy);
            pable.pathCompleted(tickStamp);
            return updated;
        }

        // see if it's time to move yet
        if (_nextMove > tickStamp) {
            return false;
        }

        // when bobbling, it's bad form to bobble into the same position:
        // keep drawing random offsets in [-_dx.._dx] x [-_dy.._dy] until the
        // position actually changes (updatePositionTo presumably returns
        // false for a no-op move — TODO confirm against its contract)
        int newx, newy;
        do {
            newx = _sx + RandomUtil.getInt(_dx * 2 + 1) - _dx;
            newy = _sy + RandomUtil.getInt(_dy * 2 + 1) - _dy;
        } while (!updatePositionTo(pable, newx, newy));

        // and update the next time to move
        _nextMove = tickStamp + _updateFreq;
        return true;
    }
}
public class NodeManager {

    /**
     * Processes a heartbeat from a cluster node, registering the node if it is
     * new and reconciling its advertised resources (apps) against the
     * previously known set.
     *
     * @param clusterNodeInfo the node that is heartbeating
     * @return true if this is a new node that has been added, false otherwise
     *         (note: the implementation returns {@code newNode || appsChanged},
     *         so a resource change on a known node also yields true)
     * @throws DisallowedNode if the node's host is not allowed and the node is
     *         not already registered
     */
    public boolean heartbeat(ClusterNodeInfo clusterNodeInfo) throws DisallowedNode {
        ClusterNode node = nameToNode.get(clusterNodeInfo.name);
        // Disallowed hosts: still record the heartbeat for already-known nodes,
        // but reject unknown ones outright.
        if (!canAllowNode(clusterNodeInfo.getAddress().getHost())) {
            if (node != null) {
                node.heartbeat(clusterNodeInfo);
            } else {
                throw new DisallowedNode(clusterNodeInfo.getAddress().getHost());
            }
            return false;
        }
        boolean newNode = false;
        Map<ResourceType, String> currentResources = clusterNodeInfo.getResourceInfos();
        if (currentResources == null) {
            // Treat a missing resource map as empty so the diff below works.
            currentResources = new EnumMap<ResourceType, String>(ResourceType.class);
        }
        if (node == null) {
            // First heartbeat from this node: register it.
            LOG.info("Adding node with heartbeat: " + clusterNodeInfo.toString());
            node = new ClusterNode(clusterNodeInfo, topologyCache.getNode(clusterNodeInfo.address.host), cpuToResourcePartitioning);
            addNode(node, currentResources);
            newNode = true;
        }
        node.heartbeat(clusterNodeInfo);
        boolean appsChanged = false;
        // NOTE(review): prevResources is used without a null check below; this
        // assumes addNode() always seeds nameToApps for the node — TODO confirm.
        Map<ResourceType, String> prevResources = nameToApps.get(clusterNodeInfo.name);
        // Diff pass 1: apps present before but now missing or changed are "deleted".
        Set<ResourceType> deletedApps = null;
        for (Map.Entry<ResourceType, String> entry : prevResources.entrySet()) {
            String newAppInfo = currentResources.get(entry.getKey());
            String oldAppInfo = entry.getValue();
            if (newAppInfo == null || !newAppInfo.equals(oldAppInfo)) {
                if (deletedApps == null) {
                    deletedApps = EnumSet.noneOf(ResourceType.class);
                }
                deletedApps.add(entry.getKey());
                appsChanged = true;
            }
        }
        // Diff pass 2: apps present now but missing or changed before are "added"
        // (a changed app appears in both sets: removed then re-added).
        Map<ResourceType, String> addedApps = null;
        for (Map.Entry<ResourceType, String> entry : currentResources.entrySet()) {
            String newAppInfo = entry.getValue();
            String oldAppInfo = prevResources.get(entry.getKey());
            if (oldAppInfo == null || !oldAppInfo.equals(newAppInfo)) {
                if (addedApps == null) {
                    addedApps = new EnumMap<ResourceType, String>(ResourceType.class);
                }
                addedApps.put(entry.getKey(), entry.getValue());
                appsChanged = true;
            }
        }
        // Apply removals before additions so a changed app is cleanly replaced.
        if (deletedApps != null) {
            for (ResourceType deleted : deletedApps) {
                clusterManager.nodeAppRemoved(clusterNodeInfo.name, deleted);
            }
        }
        if (addedApps != null) {
            for (Map.Entry<ResourceType, String> added : addedApps.entrySet()) {
                addAppToNode(node, added.getKey(), added.getValue());
            }
        }
        updateRunnability(node);
        return newNode || appsChanged;
    }
}
public class TileGenerator {

    /**
     * Updates the Contents and Tile Matrix Set bounds to include the requested
     * tile bounds, and — for GeoPackage-format tiles — re-anchors all existing
     * tile matrix metadata and tile rows to the expanded grid.
     *
     * @param tileMatrixSet the tile matrix set being updated
     * @throws java.sql.SQLException on database access failure
     * @throws GeoPackageException if the tile formats or projections are incompatible
     */
    private void updateTileBounds(TileMatrixSet tileMatrixSet) throws SQLException {
        TileDao tileDao = geoPackage.getTileDao(tileMatrixSet);

        if (tileDao.isGoogleTiles()) {
            if (!googleTiles) {
                // If adding GeoPackage tiles to a Google tile format table,
                // switch this request to Google tiles and adjust the bounds.
                googleTiles = true;
                adjustGoogleBounds();
            }
        } else if (googleTiles) {
            // Can't add Google formatted tiles to GeoPackage tiles
            throw new GeoPackageException("Can not add Google formatted tiles to " + tableName + " which already contains GeoPackage formatted tiles");
        }

        // The request projection must match the existing tile matrix projection.
        Projection tileMatrixProjection = tileMatrixSet.getSrs().getProjection();
        if (!tileMatrixProjection.equals(projection)) {
            throw new GeoPackageException("Can not update tiles projected at " + tileMatrixProjection.getCode() + " with tiles projected at " + projection.getCode());
        }

        Contents contents = tileMatrixSet.getContents();

        // Combine the existing content and request bounding boxes
        BoundingBox previousContentsBoundingBox = contents.getBoundingBox();
        if (previousContentsBoundingBox != null) {
            ProjectionTransform transformProjectionToContents = projection.getTransformation(contents.getProjection());
            BoundingBox contentsBoundingBox = boundingBox;
            if (!transformProjectionToContents.isSameProjection()) {
                contentsBoundingBox = contentsBoundingBox.transform(transformProjectionToContents);
            }
            contentsBoundingBox = contentsBoundingBox.union(previousContentsBoundingBox);

            // Update the contents if modified
            if (!contentsBoundingBox.equals(previousContentsBoundingBox)) {
                contents.setBoundingBox(contentsBoundingBox);
                ContentsDao contentsDao = geoPackage.getContentsDao();
                contentsDao.update(contents);
            }
        }

        // If updating GeoPackage format tiles, all existing metadata and tile
        // rows need to be adjusted
        if (!googleTiles) {
            BoundingBox previousTileMatrixSetBoundingBox = tileMatrixSet.getBoundingBox();

            // Adjust the bounds to include the request and existing bounds
            ProjectionTransform transformProjectionToTileMatrixSet = projection.getTransformation(tileMatrixProjection);
            boolean sameProjection = transformProjectionToTileMatrixSet.isSameProjection();
            BoundingBox updateBoundingBox = tileBounds.get(minZoom);
            if (!sameProjection) {
                updateBoundingBox = updateBoundingBox.transform(transformProjectionToTileMatrixSet);
            }
            // Anchor adjustments at the lowest zoom level involved (request or existing).
            int minNewOrUpdateZoom = Math.min(minZoom, (int) tileDao.getMinZoom());
            adjustBounds(updateBoundingBox, minNewOrUpdateZoom);

            // Update the tile matrix set if modified
            BoundingBox updateTileGridBoundingBox = tileGridBoundingBox;
            if (!sameProjection) {
                updateTileGridBoundingBox = updateTileGridBoundingBox.transform(transformProjectionToTileMatrixSet);
            }
            if (!previousTileMatrixSetBoundingBox.equals(updateTileGridBoundingBox)) {
                // Expand to cover the previous extent, re-adjust, then re-read
                // the (possibly modified) member grid bounds.
                updateTileGridBoundingBox = updateTileGridBoundingBox.union(previousTileMatrixSetBoundingBox);
                adjustBounds(updateTileGridBoundingBox, minNewOrUpdateZoom);
                updateTileGridBoundingBox = tileGridBoundingBox;
                if (!sameProjection) {
                    updateTileGridBoundingBox = updateTileGridBoundingBox.transform(transformProjectionToTileMatrixSet);
                }
                tileMatrixSet.setBoundingBox(updateTileGridBoundingBox);
                TileMatrixSetDao tileMatrixSetDao = geoPackage.getTileMatrixSetDao();
                tileMatrixSetDao.update(tileMatrixSet);
            }

            TileMatrixDao tileMatrixDao = geoPackage.getTileMatrixDao();

            // Adjust the tile matrix metadata and tile rows at each existing
            // zoom level
            for (long zoom = tileDao.getMinZoom(); zoom <= tileDao.getMaxZoom(); zoom++) {
                TileMatrix tileMatrix = tileDao.getTileMatrix(zoom);
                if (tileMatrix != null) {

                    // Determine the new width and height at this level
                    long adjustment = (long) Math.pow(2, zoom - minNewOrUpdateZoom);
                    long zoomMatrixWidth = matrixWidth * adjustment;
                    long zoomMatrixHeight = matrixHeight * adjustment;

                    // Get the zoom level tile rows, starting with highest rows
                    // and columns so when updating we avoid constraint
                    // violations
                    TileResultSet tileResultSet = tileDao.queryForTileDescending(zoom);
                    try {
                        // Update each tile row at this zoom level
                        while (tileResultSet.moveToNext()) {
                            TileRow tileRow = tileResultSet.getRow();

                            // Get the bounding box of the existing tile
                            BoundingBox tileBoundingBox = TileBoundingBoxUtils.getBoundingBox(previousTileMatrixSetBoundingBox, tileMatrix, tileRow.getTileColumn(), tileRow.getTileRow());

                            // Get the mid lat and lon to find the new tile row
                            // and column
                            double midLatitude = tileBoundingBox.getMinLatitude() + ((tileBoundingBox.getMaxLatitude() - tileBoundingBox.getMinLatitude()) / 2.0);
                            double midLongitude = tileBoundingBox.getMinLongitude() + ((tileBoundingBox.getMaxLongitude() - tileBoundingBox.getMinLongitude()) / 2.0);

                            // Get the new tile row and column with regards to
                            // the new bounding box
                            long newTileRow = TileBoundingBoxUtils.getTileRow(tileGridBoundingBox, zoomMatrixHeight, midLatitude);
                            long newTileColumn = TileBoundingBoxUtils.getTileColumn(tileGridBoundingBox, zoomMatrixWidth, midLongitude);

                            // Update the tile row
                            if (tileRow.getTileRow() != newTileRow || tileRow.getTileColumn() != newTileColumn) {
                                tileRow.setTileRow(newTileRow);
                                tileRow.setTileColumn(newTileColumn);
                                tileDao.update(tileRow);
                            }
                        }
                    } finally {
                        tileResultSet.close();
                    }

                    // Calculate the pixel size
                    double pixelXSize = (tileGridBoundingBox.getMaxLongitude() - tileGridBoundingBox.getMinLongitude()) / zoomMatrixWidth / tileMatrix.getTileWidth();
                    double pixelYSize = (tileGridBoundingBox.getMaxLatitude() - tileGridBoundingBox.getMinLatitude()) / zoomMatrixHeight / tileMatrix.getTileHeight();

                    // Update the tile matrix
                    tileMatrix.setMatrixWidth(zoomMatrixWidth);
                    tileMatrix.setMatrixHeight(zoomMatrixHeight);
                    tileMatrix.setPixelXSize(pixelXSize);
                    tileMatrix.setPixelYSize(pixelYSize);
                    tileMatrixDao.update(tileMatrix);
                }
            }

            // Adjust the width and height to the min zoom level of the
            // request
            if (minNewOrUpdateZoom < minZoom) {
                long adjustment = (long) Math.pow(2, minZoom - minNewOrUpdateZoom);
                matrixWidth *= adjustment;
                matrixHeight *= adjustment;
            }
        }
    }
}
public class UnionSet { /** * Triggers combining . */ @ Override public < T > T [ ] toArray ( T [ ] a ) { } }
combine ( ) ; if ( combined == null ) { Set < E > emptySet = java . util . Collections . emptySet ( ) ; return emptySet . toArray ( a ) ; } return combined . toArray ( a ) ;
public class StructureImpl { /** * { @ inheritDoc } */ @ Override public Chain findChain ( String chainName , int modelnr ) throws StructureException { } }
return getChainByPDB ( chainName , modelnr ) ;
public class Indices {

    /**
     * Calculates min and max message timestamps (and the set of stream ids)
     * in the given index via a single filtered aggregation query.
     *
     * @param index name of the index to query
     * @return the timestamp stats in the given index; {@link IndexRangeStats#EMPTY}
     *         when the index contains no documents with a timestamp field
     * @throws IndexNotFoundException if the index does not exist
     * @see org.elasticsearch.search.aggregations.metrics.stats.Stats
     */
    public IndexRangeStats indexRangeStatsOfIndex(String index) {
        // Filter to documents that actually carry a timestamp, then aggregate
        // min/max timestamp and the distinct stream ids in one round trip.
        final FilterAggregationBuilder builder = AggregationBuilders.filter("agg", QueryBuilders.existsQuery(Message.FIELD_TIMESTAMP)).subAggregation(AggregationBuilders.min("ts_min").field(Message.FIELD_TIMESTAMP)).subAggregation(AggregationBuilders.max("ts_max").field(Message.FIELD_TIMESTAMP)).subAggregation(AggregationBuilders.terms("streams").field(Message.FIELD_STREAMS));
        // size(0): we only want aggregation results, no document hits.
        final String query = searchSource().aggregation(builder).size(0).toString();
        final Search request = new Search.Builder(query).addIndex(index).setSearchType(SearchType.DFS_QUERY_THEN_FETCH).ignoreUnavailable(true).build();
        if (LOG.isDebugEnabled()) {
            String data = "{}";
            try {
                // Pretty-print on a copied mapper so the shared one keeps its settings.
                data = request.getData(objectMapper.copy().enable(SerializationFeature.INDENT_OUTPUT));
            } catch (IOException e) {
                LOG.debug("Couldn't pretty print request payload", e);
            }
            LOG.debug("Index range query: _search/{}: {}", index, data);
        }
        final SearchResult result = JestUtils.execute(jestClient, request, () -> "Couldn't build index range of index " + index);
        final FilterAggregation f = result.getAggregations().getFilterAggregation("agg");
        if (f == null) {
            // A missing aggregation means the index itself wasn't found.
            throw new IndexNotFoundException("Couldn't build index range of index " + index + " because it doesn't exist.");
        } else if (f.getCount() == 0L) {
            LOG.debug("No documents with attribute \"timestamp\" found in index <{}>", index);
            return IndexRangeStats.EMPTY;
        }
        final MinAggregation minAgg = f.getMinAggregation("ts_min");
        final DateTime min = new DateTime(minAgg.getMin().longValue(), DateTimeZone.UTC);
        final MaxAggregation maxAgg = f.getMaxAggregation("ts_max");
        final DateTime max = new DateTime(maxAgg.getMax().longValue(), DateTimeZone.UTC);
        // make sure we return an empty list, so we can differentiate between old indices that don't have this information
        // and newer ones that simply have no streams
        final TermsAggregation streams = f.getTermsAggregation("streams");
        final List<String> streamIds = streams.getBuckets().stream().map(TermsAggregation.Entry::getKeyAsString).collect(toList());
        return IndexRangeStats.create(min, max, streamIds);
    }
}
public class SearchSort { /** * Sort by geo location . * @ param locationLon longitude of the location . * @ param locationLat latitude of the location . * @ param field the field name . */ public static SearchSortGeoDistance sortGeoDistance ( double locationLon , double locationLat , String field ) { } }
return new SearchSortGeoDistance ( locationLon , locationLat , field ) ;
public class Interceptors { /** * Creates a binary interceptor chain . * @ param < T1 > the function first parameter type * @ param < T2 > the function second parameter type * @ param < I > the binary interceptor type * @ param < R > the function result type * @ param innermost the function to be intercepted * @ param interceptors an iterable of interceptors * @ return the resulting function */ public static < T1 , T2 , I extends BinaryInterceptor < T1 , T2 > , R > BiFunction < T1 , T2 , R > intercept ( BiFunction < T1 , T2 , R > innermost , Iterable < I > interceptors ) { } }
dbc . precondition ( interceptors != null , "cannot create an interceptor chain with a null iterable of interceptors" ) ; return new BinaryInterceptorChain < > ( innermost , interceptors . iterator ( ) ) ;
public class Proxies {

    /**
     * Creates an HTTP proxy descriptor for the given host and port.
     *
     * @param host proxy host, must not be null
     * @param port proxy port
     * @return a {@link Proxy} of type {@code HTTP}
     * @throws NullPointerException if {@code host} is null
     */
    public static Proxy httpProxy(String host, int port) {
        SocketAddress address = new InetSocketAddress(Objects.requireNonNull(host), port);
        return new Proxy(Proxy.Type.HTTP, address);
    }
}
public class Assistant {

    /**
     * Create dialog node.
     *
     * Creates a new dialog node in the given workspace. This operation is
     * limited to 500 requests per 30 minutes; see the service's rate limiting
     * documentation.
     *
     * @param createDialogNodeOptions the {@link CreateDialogNodeOptions} containing the options for the call
     * @return a {@link ServiceCall} with a response type of {@link DialogNode}
     */
    public ServiceCall<DialogNode> createDialogNode(CreateDialogNodeOptions createDialogNodeOptions) {
        Validator.notNull(createDialogNodeOptions, "createDialogNodeOptions cannot be null");
        // POST v1/workspaces/{workspaceId}/dialog_nodes
        String[] pathSegments = { "v1/workspaces", "dialog_nodes" };
        String[] pathParameters = { createDialogNodeOptions.workspaceId() };
        RequestBuilder builder = RequestBuilder.post(RequestBuilder.constructHttpUrl(getEndPoint(), pathSegments, pathParameters));
        builder.query("version", versionDate);
        // Attach SDK analytics headers plus the JSON accept header.
        Map<String, String> sdkHeaders = SdkCommon.getSdkHeaders("conversation", "v1", "createDialogNode");
        for (Entry<String, String> header : sdkHeaders.entrySet()) {
            builder.header(header.getKey(), header.getValue());
        }
        builder.header("Accept", "application/json");
        // Build the request body: dialog_node is mandatory, every other
        // property is only serialized when the option is present. Scalar
        // options use addProperty; structured options are serialized via Gson.
        final JsonObject contentJson = new JsonObject();
        contentJson.addProperty("dialog_node", createDialogNodeOptions.dialogNode());
        if (createDialogNodeOptions.description() != null) {
            contentJson.addProperty("description", createDialogNodeOptions.description());
        }
        if (createDialogNodeOptions.conditions() != null) {
            contentJson.addProperty("conditions", createDialogNodeOptions.conditions());
        }
        if (createDialogNodeOptions.parent() != null) {
            contentJson.addProperty("parent", createDialogNodeOptions.parent());
        }
        if (createDialogNodeOptions.previousSibling() != null) {
            contentJson.addProperty("previous_sibling", createDialogNodeOptions.previousSibling());
        }
        if (createDialogNodeOptions.output() != null) {
            contentJson.add("output", GsonSingleton.getGson().toJsonTree(createDialogNodeOptions.output()));
        }
        if (createDialogNodeOptions.context() != null) {
            contentJson.add("context", GsonSingleton.getGson().toJsonTree(createDialogNodeOptions.context()));
        }
        if (createDialogNodeOptions.metadata() != null) {
            contentJson.add("metadata", GsonSingleton.getGson().toJsonTree(createDialogNodeOptions.metadata()));
        }
        if (createDialogNodeOptions.nextStep() != null) {
            contentJson.add("next_step", GsonSingleton.getGson().toJsonTree(createDialogNodeOptions.nextStep()));
        }
        if (createDialogNodeOptions.title() != null) {
            contentJson.addProperty("title", createDialogNodeOptions.title());
        }
        if (createDialogNodeOptions.nodeType() != null) {
            // Note the wire name is "type" while the option accessor is nodeType().
            contentJson.addProperty("type", createDialogNodeOptions.nodeType());
        }
        if (createDialogNodeOptions.eventName() != null) {
            contentJson.addProperty("event_name", createDialogNodeOptions.eventName());
        }
        if (createDialogNodeOptions.variable() != null) {
            contentJson.addProperty("variable", createDialogNodeOptions.variable());
        }
        if (createDialogNodeOptions.actions() != null) {
            contentJson.add("actions", GsonSingleton.getGson().toJsonTree(createDialogNodeOptions.actions()));
        }
        if (createDialogNodeOptions.digressIn() != null) {
            contentJson.addProperty("digress_in", createDialogNodeOptions.digressIn());
        }
        if (createDialogNodeOptions.digressOut() != null) {
            contentJson.addProperty("digress_out", createDialogNodeOptions.digressOut());
        }
        if (createDialogNodeOptions.digressOutSlots() != null) {
            contentJson.addProperty("digress_out_slots", createDialogNodeOptions.digressOutSlots());
        }
        if (createDialogNodeOptions.userLabel() != null) {
            contentJson.addProperty("user_label", createDialogNodeOptions.userLabel());
        }
        builder.bodyJson(contentJson);
        return createServiceCall(builder.build(), ResponseConverterUtils.getObject(DialogNode.class));
    }
}
public class Assert {

    /**
     * Assert that an array has elements; that is, it must not be
     * {@code null} and must have at least one element.
     * <pre class="code">Assert.notEmpty(array, "The array must have elements");</pre>
     *
     * @param array   the array to check
     * @param message the exception message to use if the assertion fails
     * @throws java.lang.IllegalArgumentException if the object array is
     *         {@code null} or has no elements
     */
    public static void notEmpty(Object[] array, String message) {
        // Inline the trivial null/empty check instead of routing it through
        // a third-party helper (ArrayUtils.isEmpty has identical semantics);
        // this keeps the assertion utility dependency-free.
        if (array == null || array.length == 0) {
            throw new IllegalArgumentException(message);
        }
    }
}
public class StrTokenizer { /** * Creates a new instance of this Tokenizer . The new instance is reset so that * it will be at the start of the token list . * @ return a new instance of this Tokenizer which has been reset . * @ throws CloneNotSupportedException if there is a problem cloning */ Object cloneReset ( ) throws CloneNotSupportedException { } }
// this method exists to enable 100 % test coverage final StrTokenizer cloned = new StrTokenizer ( ) ; if ( this . chars != null ) { cloned . chars = new char [ this . chars . length ] ; for ( int i = 0 ; i < this . chars . length ; i ++ ) { cloned . chars [ i ] = this . chars [ i ] ; } } if ( this . tokens == null ) { cloned . tokens = null ; } else { cloned . tokens = this . getTokenArray ( ) ; } cloned . tokenPos = this . tokenPos ; cloned . emptyAsNull = this . emptyAsNull ; cloned . ignoreEmptyTokens = this . ignoreEmptyTokens ; cloned . delimMatcher = this . delimMatcher ; cloned . quoteMatcher = this . quoteMatcher ; cloned . ignoredMatcher = this . ignoredMatcher ; cloned . trimmerMatcher = this . trimmerMatcher ; cloned . reset ( ) ; return cloned ;
public class VertxServerWebSocket { /** * { @ link org . vertx . java . core . http . ServerWebSocket } is available . */ @ Override public < T > T unwrap ( Class < T > clazz ) { } }
return org . vertx . java . core . http . ServerWebSocket . class . isAssignableFrom ( clazz ) ? clazz . cast ( socket ) : null ;
public class GamePlayerBenchmark {

    /**
     * Simulates game-play over the model to benchmark marginal computation:
     * repeatedly descends 10 random moves from the root, computing (and
     * caching) singleton marginals at each new state, then unwinds the moves
     * and reports timing to stderr.
     *
     * @param r                   randomness source for move selection
     * @param model               the graphical model being played on (mutated by push/pop, restored by the end)
     * @param weights             model weights used to build the clique tree
     * @param humanFeatureVectors feature vectors passed through to state creation
     */
    private static void gameplay(Random r, GraphicalModel model, ConcatVector weights, ConcatVector[] humanFeatureVectors) {
        // Collect the distinct variables and their sizes from all factors.
        List<Integer> variablesList = new ArrayList<>();
        List<Integer> variableSizesList = new ArrayList<>();
        for (GraphicalModel.Factor f : model.factors) {
            for (int i = 0; i < f.neigborIndices.length; i++) {
                int j = f.neigborIndices[i];
                if (!variablesList.contains(j)) {
                    variablesList.add(j);
                    variableSizesList.add(f.featuresTable.getDimensions()[i]);
                }
            }
        }
        int[] variables = variablesList.stream().mapToInt(i -> i).toArray();
        int[] variableSizes = variableSizesList.stream().mapToInt(i -> i).toArray();

        List<SampleState> childrenOfRoot = new ArrayList<>();
        CliqueTree tree = new CliqueTree(model, weights);
        int initialFactors = model.factors.size();

        // Run some "samples"
        long start = System.currentTimeMillis();
        long marginalsTime = 0;
        for (int i = 0; i < 1000; i++) {
            System.err.println("\tTaking sample " + i);
            Stack<SampleState> stack = new Stack<>();
            SampleState state = selectOrCreateChildAtRandom(r, model, variables, variableSizes, childrenOfRoot, humanFeatureVectors);
            long localMarginalsTime = 0;
            // Each "sample" is 10 moves deep
            for (int j = 0; j < 10; j++) {
                // System.err.println("\t\tFrame " + j);
                state.push(model);
                // Each push should add exactly one factor to the model.
                assert (model.factors.size() == initialFactors + j + 1);
                // This is the thing we're really benchmarking: marginals are
                // computed once per state and cached.
                if (state.cachedMarginal == null) {
                    long s = System.currentTimeMillis();
                    state.cachedMarginal = tree.calculateMarginalsJustSingletons();
                    localMarginalsTime += System.currentTimeMillis() - s;
                }
                stack.push(state);
                state = selectOrCreateChildAtRandom(r, model, variables, variableSizes, state.children, humanFeatureVectors);
            }
            System.err.println("\t\t" + localMarginalsTime + " ms");
            marginalsTime += localMarginalsTime;
            // Unwind the 10 moves, restoring the model to its initial factors.
            while (!stack.empty()) {
                stack.pop().pop(model);
            }
            assert (model.factors.size() == initialFactors);
        }
        System.err.println("Marginals time: " + marginalsTime + " ms");
        // NOTE(review): the divisor 200 does not obviously match the 1000
        // samples x 10 moves performed above — verify the intended average.
        System.err.println("Avg time per marginal: " + (marginalsTime / 200) + " ms");
        System.err.println("Total time: " + (System.currentTimeMillis() - start));
    }
}
public class MoveDataBetweenMarklogicDBs {
    /**
     * Exports documents matching {@code ctsQuery} from the source MarkLogic database and
     * writes a filtered subset (URIs containing "customer") into the destination database
     * under a "/exported" prefix, using the Data Movement SDK batchers.
     * Blocks until both the query batcher and the write batcher have drained.
     */
    public void moveDataBetweenMarklogicDBs() {
        // Empty and-query to retrieve the entire set of documents. This can be
        // replaced with any cts query narrowing the scope of documents to be
        // exported.
        String rawSearch = new StringBuilder().append("<search:search ")
                .append("xmlns:search='http://marklogic.com/appservices/search' xmlns:cts='http://marklogic.com/cts'>")
                .append(ctsQuery).append("</search:search>").toString();
        // Optional in-database (REST) transform, applied while reading documents.
        ServerTransform transform = null;
        if (!transformName.equals("")) {
            transform = new ServerTransform(transformName);
        }
        QueryManager queryMgr = sourceClient.newQueryManager();
        RawCombinedQueryDefinition queryDef = queryMgr.newRawCombinedQueryDefinitionAs(Format.XML, rawSearch);
        // WriteBatcher for the destination DB to write the documents.
        WriteBatcher destWriteBatcher = destMoveMgr.newWriteBatcher()
                .withBatchSize(batchSize)
                .withThreadCount(threadCount)
                .onBatchSuccess(batch -> System.out.println(
                        "Written " + batch.getJobWritesSoFar() + " documents into the target database"))
                .onBatchFailure((batch, throwable) -> throwable.printStackTrace());
        destMoveMgr.startJob(destWriteBatcher);
        // withConsistentSnapshot(): all reads see the database state at job start.
        ExportListener exportListener = new ExportListener().withConsistentSnapshot().onDocumentReady(record -> {
            /*
             * This is where you do a client side map (write one or more documents
             * per document read) or reduce (write one document per many documents
             * read) or filter (write certain documents which match a criteria).
             * Here we do a filter to export only customer documents.
             */
            if (record.getUri().contains("customer")) {
                destWriteBatcher.add("/exported" + record.getUri(),
                        record.getMetadata(new StringHandle()),
                        record.getContent(new StringHandle()).withFormat(record.getFormat()));
            }
        }).withMetadataCategory(Metadata.COLLECTIONS);
        // Read the documents with transform if a transform is specified (in-database transform).
        if (transform != null) {
            exportListener.withTransform(transform);
        }
        // QueryBatcher for the source DB to query the documents to be exported.
        QueryBatcher sourceQueryBatcher = sourceMoveMgr.newQueryBatcher(queryDef)
                .withBatchSize(batchSize)
                .withThreadCount(threadCount)
                .onUrisReady(exportListener)
                .onQueryFailure(exception -> exception.printStackTrace());
        sourceMoveMgr.startJob(sourceQueryBatcher);
        // Wait till the query batcher completes.
        sourceQueryBatcher.awaitCompletion();
        sourceMoveMgr.stopJob(sourceQueryBatcher);
        // Wait till the write batcher completes.
        destWriteBatcher.flushAndWait();
        destMoveMgr.stopJob(destWriteBatcher);
    }
}
public class BatchSuspendUserResult { /** * If the < a > BatchSuspendUser < / a > action fails for one or more of the user IDs in the request , a list of the user * IDs is returned , along with error codes and error messages . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setUserErrors ( java . util . Collection ) } or { @ link # withUserErrors ( java . util . Collection ) } if you want to * override the existing values . * @ param userErrors * If the < a > BatchSuspendUser < / a > action fails for one or more of the user IDs in the request , a list of the * user IDs is returned , along with error codes and error messages . * @ return Returns a reference to this object so that method calls can be chained together . */ public BatchSuspendUserResult withUserErrors ( UserError ... userErrors ) { } }
if ( this . userErrors == null ) { setUserErrors ( new java . util . ArrayList < UserError > ( userErrors . length ) ) ; } for ( UserError ele : userErrors ) { this . userErrors . add ( ele ) ; } return this ;
public class ProgressInformationPanel {
    /**
     * Prints the stack trace of {@code throwable} to the given writer, and walks the
     * throwable hierarchy to find any {@link SQLException}s carrying "next" exceptions,
     * which live outside the normal cause chain and would otherwise be lost.
     *
     * @param printWriter destination for the stack traces
     * @param throwable the throwable to print; it and its cause chain are inspected
     */
    protected void printStackTrace(final PrintWriter printWriter, final Throwable throwable) {
        throwable.printStackTrace(printWriter);
        // BUG FIX: start the walk at the throwable itself. The original started at
        // getCause(), so a next-exception on the top-level SQLException was skipped.
        Throwable cause = throwable;
        while (cause != null) {
            if (cause instanceof SQLException) {
                final SQLException nextException = ((SQLException) cause).getNextException();
                if (nextException != null) {
                    printWriter.print("Next exception: ");
                    // Recurse: the next exception may itself have causes and next exceptions.
                    printStackTrace(printWriter, nextException);
                }
            }
            cause = cause.getCause();
        }
    }
}
public class KeyPairWriter { /** * Write the key pair into the output files . * @ param pair the key pair to write . * @ param format that can be { @ link IOFormat # BINARY } or { @ link IOFormat # BASE64 } . Using { @ link IOFormat # STRING } * will throw exception as keys , as opposed to licenses , cannot be saved in string format . * @ throws IOException when the underlying media cannot be written */ public void write ( LicenseKeyPair pair , IOFormat format ) throws IOException { } }
switch ( format ) { case BINARY : osPrivate . write ( pair . getPrivate ( ) ) ; osPublic . write ( pair . getPublic ( ) ) ; return ; case BASE64 : osPrivate . write ( Base64 . getEncoder ( ) . encode ( pair . getPrivate ( ) ) ) ; osPublic . write ( Base64 . getEncoder ( ) . encode ( pair . getPublic ( ) ) ) ; return ; } throw new IllegalArgumentException ( "Key format " + format + " is unknown." ) ;
public class FbBotMillNetworkController {
    /**
     * POSTs a message as multipart form data to Facebook, attaching a file.
     * Builds the multipart request (recipient JSON part, message JSON part, binary
     * file part) against the Messenger Send API.
     *
     * NOTE(review): the final {@code postInternal(post)} call is commented out, so as
     * written this method builds and logs the request but never sends it — confirm
     * whether that is intentional debug state.
     *
     * @param recipient the recipient id
     * @param type the attachment type
     * @param file the file to attach
     */
    public static void postFormDataMessage(String recipient, AttachmentType type, File file) {
        String pageToken = FbBotMillContext.getInstance().getPageToken();
        // If the page token is invalid, returns.
        if (!validatePageToken(pageToken)) {
            return;
        }
        // TODO: add checks for valid attachmentTypes (FILE, AUDIO or VIDEO)
        HttpPost post = new HttpPost(FbBotMillNetworkConstants.FACEBOOK_BASE_URL
                + FbBotMillNetworkConstants.FACEBOOK_MESSAGES_URL + pageToken);
        FileBody filedata = new FileBody(file);
        // Recipient part, e.g. {"id":"<recipient>"}.
        StringBody recipientPart = new StringBody("{\"id\":\"" + recipient + "\"}",
                ContentType.MULTIPART_FORM_DATA);
        // Message part declaring the attachment type with an empty payload (the
        // payload is carried by the binary "filedata" part below).
        StringBody messagePart = new StringBody("{\"attachment\":{\"type\":\"" + type.name().toLowerCase()
                + "\", \"payload\":{}}}", ContentType.MULTIPART_FORM_DATA);
        MultipartEntityBuilder builder = MultipartEntityBuilder.create();
        builder.setMode(HttpMultipartMode.STRICT);
        builder.addPart("recipient", recipientPart);
        builder.addPart("message", messagePart);
        // builder.addPart("filedata", filedata);
        builder.addBinaryBody("filedata", file);
        builder.setContentType(ContentType.MULTIPART_FORM_DATA);
        // builder.setBoundary("----WebKitFormBoundary7MA4YWxkTrZu0gW");
        HttpEntity entity = builder.build();
        post.setEntity(entity);
        // Logs the raw JSON for debug purposes.
        BufferedReader br;
        // post.addHeader("Content-Type", "multipart/form-data");
        try {
            // br = new BufferedReader(new InputStreamReader(
            Header[] allHeaders = post.getAllHeaders();
            for (Header h : allHeaders) {
                logger.debug("Header {} -> {}", h.getName(), h.getValue());
            }
            // String output = br.readLine();
        } catch (Exception e) {
            e.printStackTrace();
        }
        // postInternal(post);
    }
}
public class LeverageBrowserCacheFilter { /** * Execute the filter by appending the < b > Expires < / b > and * < b > Cache - Control < / b > response headers . * @ param servletRequest * the incoming { @ link ServletRequest } instance * @ param servletResponse * the outgoing { @ link ServletResponse } instance * @ param filterChain * the { @ link FilterChain } being executed * @ throws IOException * if something fails * @ throws ServletException * if something fails * @ see javax . servlet . Filter # doFilter ( javax . servlet . ServletRequest , * javax . servlet . ServletResponse , javax . servlet . FilterChain ) */ @ Override public void doFilter ( ServletRequest servletRequest , ServletResponse servletResponse , FilterChain filterChain ) throws IOException , ServletException { } }
HttpServletRequest request = ( HttpServletRequest ) servletRequest ; String uri = request . getRequestURI ( ) ; // check if this is a servletRequest to a static resource , like images / css / js // if yes , add the servletResponse header boolean staticResource = isStaticResource ( uri ) ; filterChain . doFilter ( servletRequest , servletResponse ) ; // if static resources if ( staticResource ) { LOGGER . debug ( "Marking URI: {} as a static resource" , uri ) ; HttpServletResponse response = ( HttpServletResponse ) servletResponse ; response . addDateHeader ( "Expires" , System . currentTimeMillis ( ) + DateUtils . ONE_YEAR ) ; response . addHeader ( "Cache-Control" , "public, max-age=" + ONE_YEAR_AS_SECONDS ) ; // turn the line below to check if a resource was cached due to this filter // response . addHeader ( " X - Filter " , " LeverageBrowserCache " ) ; }
public class JSModuleGraph { /** * Returns the deepest common dependency of the given modules . */ public JSModule getDeepestCommonDependencyInclusive ( Collection < JSModule > modules ) { } }
Iterator < JSModule > iter = modules . iterator ( ) ; JSModule dep = iter . next ( ) ; while ( iter . hasNext ( ) ) { dep = getDeepestCommonDependencyInclusive ( dep , iter . next ( ) ) ; } return dep ;
public class InternalFeaturePropertyAccessor { /** * { @ inheritDoc } */ public TypedValue read ( EvaluationContext context , Object target , String name ) throws AccessException { } }
if ( target == null ) { throw new AccessException ( "Cannot read property of null target" ) ; } if ( target instanceof InternalFeature ) { InternalFeature feature = ( InternalFeature ) target ; if ( feature . getAttributes ( ) . containsKey ( name ) ) { Attribute < ? > attribute = feature . getAttributes ( ) . get ( name ) ; return new TypedValue ( attribute . getValue ( ) ) ; } else if ( ID_PROPERTY_NAME . equalsIgnoreCase ( ID_PROPERTY_NAME ) ) { return new TypedValue ( feature . getId ( ) ) ; } else { throw new AccessException ( "Unknown attribute " + name + "for layer " + feature . getLayer ( ) . getId ( ) ) ; } } else if ( target instanceof AssociationValue ) { AssociationValue associationValue = ( AssociationValue ) target ; if ( associationValue . getAllAttributes ( ) . containsKey ( name ) ) { Attribute < ? > attribute = associationValue . getAllAttributes ( ) . get ( name ) ; return new TypedValue ( attribute . getValue ( ) ) ; } else if ( ID_PROPERTY_NAME . equalsIgnoreCase ( ID_PROPERTY_NAME ) ) { Attribute < ? > attribute = associationValue . getId ( ) ; return new TypedValue ( attribute . getValue ( ) ) ; } else { throw new AccessException ( "Unknown attribute " + name + " for association " + target ) ; } } else { throw new AccessException ( "Cannot read property " + name + "from class " + target . getClass ( ) ) ; }
public class NameSpace { /** * This method simply delegates to This . invokeMethod ( ) ; . * @ param methodName the method name * @ param args the args * @ param interpreter the interpreter * @ param callstack the callstack * @ param callerInfo the caller info * @ return the object * @ throws EvalError the eval error * @ see bsh . This . invokeMethod ( String methodName , Object [ ] args , * Interpreter interpreter , CallStack callstack , SimpleNode * callerInfo ) */ public Object invokeMethod ( final String methodName , final Object [ ] args , final Interpreter interpreter , final CallStack callstack , final SimpleNode callerInfo ) throws EvalError { } }
return this . getThis ( interpreter ) . invokeMethod ( methodName , args , interpreter , callstack , callerInfo , false /* declaredOnly */ ) ;
public class Utils { /** * Logs target object error */ static void logE ( Object targetObj , String msg ) { } }
Log . e ( TAG , toLogStr ( targetObj , msg ) ) ;
public class ConsoleAnnotator { /** * Cast operation that restricts T . */ @ SuppressWarnings ( "unchecked" ) public static < T > ConsoleAnnotator < T > cast ( ConsoleAnnotator < ? super T > a ) { } }
return ( ConsoleAnnotator ) a ;
public class AnnotationUtility { /** * Estract from an annotation of a property the attribute value specified . * @ param property property to analyze * @ param annotationClass annotation to analyze * @ param attribute the attribute * @ return attribute value as list of string */ public static String extractAsEnumerationValue ( ModelProperty property , ModelAnnotation annotationClass , AnnotationAttributeType attribute ) { } }
final Elements elementUtils = BaseProcessor . elementUtils ; final One < String > result = new One < String > ( ) ; extractAttributeValue ( elementUtils , property . getElement ( ) , annotationClass . getName ( ) , attribute , new OnAttributeFoundListener ( ) { @ Override public void onFound ( String value ) { result . value0 = value . substring ( value . lastIndexOf ( "." ) + 1 ) ; } } ) ; return result . value0 ;
public class PolicyInformation { /** * PolicyInformation : : = SEQUENCE { * policyIdentifier CertPolicyId , * policyQualifiers SEQUENCE SIZE ( 1 . . MAX ) OF * PolicyQualifierInfo OPTIONAL } */ public DERObject toASN1Object ( ) { } }
ASN1EncodableVector v = new ASN1EncodableVector ( ) ; v . add ( policyIdentifier ) ; if ( policyQualifiers != null ) { v . add ( policyQualifiers ) ; } return new DERSequence ( v ) ;
public class JPEGQuality { /** * Determines an approximate JPEG compression quality value from the quantization tables . * The value will be in the range { @ code [ 0 . . . 1 ] } , where { @ code 1 } is the best possible value . * @ param input an image input stream containing JPEG data . * @ return a float in the range { @ code [ 0 . . . 1 ] } , representing the JPEG quality , * or { @ code - 1 } if the quality can ' t be determined . * @ throws IIOException if a JPEG format error is found during parsing . * @ throws IOException if an I / O exception occurs during parsing . * @ see javax . imageio . plugins . jpeg . JPEGImageWriteParam # setCompressionQuality ( float ) * @ see JPEG # DQT */ public static float getJPEGQuality ( final ImageInputStream input ) throws IOException { } }
return getJPEGQuality ( JPEGSegmentUtil . readSegments ( input , JPEG . DQT , null ) ) ;
public class JavacHandlerUtil { /** * In javac , dotted access of any kind , from { @ code java . lang . String } to { @ code var . methodName } * is represented by a fold - left of { @ code Select } nodes with the leftmost string represented by * a { @ code Ident } node . This method generates such an expression . * For example , maker . Select ( maker . Select ( maker . Ident ( NAME [ java ] ) , NAME [ lang ] ) , NAME [ String ] ) . * @ see com . sun . tools . javac . tree . JCTree . JCIdent * @ see com . sun . tools . javac . tree . JCTree . JCFieldAccess */ public static JCExpression chainDotsString ( JavacNode node , String elems ) { } }
return chainDots ( node , null , null , elems . split ( "\\." ) ) ;
public class xen_health_resource {
    /**
     * <pre>
     * Converts API response of bulk operation into object and returns the object array in case of get request.
     * </pre>
     * Parses the raw JSON/XML payload into response wrappers, raises a
     * nitro_exception on any non-zero error code (clearing the session first if it
     * expired), and unwraps the first resource from each per-item response.
     */
    protected base_resource[] get_nitro_bulk_response(nitro_service service, String response) throws Exception {
        xen_health_resource_responses result = (xen_health_resource_responses) service.get_payload_formatter()
                .string_to_resource(xen_health_resource_responses.class, response);
        if (result.errorcode != 0) {
            // Expired session: drop it so the next call re-authenticates.
            if (result.errorcode == SESSION_NOT_EXISTS)
                service.clear_session();
            throw new nitro_exception(result.message, result.errorcode,
                    (base_response[]) result.xen_health_resource_response_array);
        }
        // Each bulk response item wraps a single-element resource array; take element 0.
        xen_health_resource[] result_xen_health_resource =
                new xen_health_resource[result.xen_health_resource_response_array.length];
        for (int i = 0; i < result.xen_health_resource_response_array.length; i++) {
            result_xen_health_resource[i] = result.xen_health_resource_response_array[i].xen_health_resource[0];
        }
        return result_xen_health_resource;
    }
}
public class JsonDBTemplate { /** * / * ( non - Javadoc ) * @ see io . jsondb . JsonDBOperations # createCollection ( java . lang . String ) */ @ Override public < T > void createCollection ( String collectionName ) { } }
CollectionMetaData cmd = cmdMap . get ( collectionName ) ; if ( null == cmd ) { throw new InvalidJsonDbApiUsageException ( "No class found with @Document Annotation and attribute collectionName as: " + collectionName ) ; } @ SuppressWarnings ( "unchecked" ) Map < Object , T > collection = ( Map < Object , T > ) collectionsRef . get ( ) . get ( collectionName ) ; if ( null != collection ) { throw new InvalidJsonDbApiUsageException ( "Collection by name '" + collectionName + "' already exists." ) ; } cmd . getCollectionLock ( ) . writeLock ( ) . lock ( ) ; // Some other thread might have created same collection when this thread reached this point if ( collectionsRef . get ( ) . get ( collectionName ) != null ) { return ; } try { String collectionFileName = collectionName + ".json" ; File fileObject = new File ( dbConfig . getDbFilesLocation ( ) , collectionFileName ) ; try { fileObject . createNewFile ( ) ; } catch ( IOException e ) { logger . error ( "IO Exception creating the collection file {}" , collectionFileName , e ) ; throw new InvalidJsonDbApiUsageException ( "Unable to create a collection file for collection: " + collectionName ) ; } if ( Util . stampVersion ( dbConfig , fileObject , cmd . getSchemaVersion ( ) ) ) { collection = new LinkedHashMap < Object , T > ( ) ; collectionsRef . get ( ) . put ( collectionName , collection ) ; contextsRef . get ( ) . put ( collectionName , JXPathContext . newContext ( collection . values ( ) ) ) ; fileObjectsRef . get ( ) . put ( collectionName , fileObject ) ; cmd . setActualSchemaVersion ( cmd . getSchemaVersion ( ) ) ; } else { fileObject . delete ( ) ; throw new JsonDBException ( "Failed to stamp version for collection: " + collectionName ) ; } } finally { cmd . getCollectionLock ( ) . writeLock ( ) . unlock ( ) ; }
public class DefaultCommandRegistry { /** * { @ inheritDoc } */ public boolean containsCommand ( String commandId ) { } }
Assert . notNull ( commandId , "commandId" ) ; if ( this . commandMap . containsKey ( commandId ) ) { return true ; } if ( this . parent != null ) { return this . parent . containsCommand ( commandId ) ; } return false ;
public class Stream { /** * This aggregator operation computes the minimum of tuples by the given { @ code inputFieldName } and it is * assumed that its value is an instance of { @ code Comparable } . If the value of tuple with field { @ code inputFieldName } is not an * instance of { @ code Comparable } then it throws { @ code ClassCastException } * @ param inputFieldName input field name * @ return the new stream with this operation . */ public Stream minBy ( String inputFieldName ) { } }
Aggregator < ComparisonAggregator . State > min = new Min ( inputFieldName ) ; return comparableAggregateStream ( inputFieldName , min ) ;
public class DirectedGraph { /** * Add a vertex to the graph . Nothing happens if vertex is already in graph . */ public void addVertex ( V vertex ) { } }
if ( containsVertex ( vertex ) ) { return ; } neighbors . put ( vertex , new ArrayList < V > ( ) ) ;
public class PalDB { /** * Creates a store writer with the specified < code > file < / code > as destination . * The parent folder is created if missing . * @ param file location of the output file * @ param config configuration * @ return a store writer */ public static StoreWriter createWriter ( File file , Configuration config ) { } }
return StoreImpl . createWriter ( file , config ) ;
public class CmsSetupXmlHelper { /** * Unmarshals ( reads ) an XML string into a new document . < p > * @ param xml the XML code to unmarshal * @ return the generated document * @ throws CmsXmlException if something goes wrong */ public static String format ( String xml ) throws CmsXmlException { } }
return CmsXmlUtils . marshal ( ( Node ) CmsXmlUtils . unmarshalHelper ( xml , null ) , CmsEncoder . ENCODING_UTF_8 ) ;
public class LDblToCharFunctionBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */ @ Nonnull public static LDblToCharFunction dblToCharFunctionFrom ( Consumer < LDblToCharFunctionBuilder > buildingFunction ) { } }
LDblToCharFunctionBuilder builder = new LDblToCharFunctionBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ;
public class JobConf {
    /**
     * Find a jar that contains a class of the same name, if any.
     * It will return a jar file, even if that is not the first thing
     * on the class path that has a class with the same name.
     *
     * @param my_class the class to find.
     * @return a jar file path that contains the class, or null.
     * @throws RuntimeException wrapping any IOException from resource enumeration
     */
    private static String findContainingJar(Class my_class) {
        ClassLoader loader = my_class.getClassLoader();
        // Resource name of the class file, e.g. "org/apache/Foo.class".
        String class_file = my_class.getName().replaceAll("\\.", "/") + ".class";
        try {
            for (Enumeration itr = loader.getResources(class_file); itr.hasMoreElements();) {
                URL url = (URL) itr.nextElement();
                if ("jar".equals(url.getProtocol())) {
                    String toReturn = url.getPath();
                    // A jar: URL's path embeds a nested "file:" scheme; strip it.
                    if (toReturn.startsWith("file:")) {
                        toReturn = toReturn.substring("file:".length());
                    }
                    // URL paths are percent-encoded (e.g. spaces as %20); decode them.
                    toReturn = URLDecoder.decode(toReturn, "UTF-8");
                    // Drop the "!/entry/inside/jar" suffix, keeping only the jar's path.
                    return toReturn.replaceAll("!.*$", "");
                }
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        // No jar: URL found (class may come from a directory or the runtime image).
        return null;
    }
}
public class Builder {
    /**
     * Returns a newly created {@code EVCache} based on the contents of the
     * {@code Builder}.
     *
     * @return the configured {@code EVCache} instance
     * @throws IllegalArgumentException if no application name was set
     */
    @SuppressWarnings("deprecation")
    public EVCache build() {
        if (_poolManager == null) {
            // Fall back to the singleton pool manager when none was supplied.
            _poolManager = EVCacheClientPoolManager.getInstance();
            if (logger.isDebugEnabled()) logger.debug("_poolManager - " + _poolManager + " through getInstance");
        }
        if (_appName == null) {
            throw new IllegalArgumentException("param appName cannot be null.");
        }
        // NOTE(review): presumably applies final builder configuration before
        // construction — confirm customize()'s contract.
        customize();
        return new EVCacheImpl(_appName, _cachePrefix, _ttl, _transcoder, _serverGroupRetry,
                _enableExceptionThrowing, _poolManager);
    }
}
public class ObjectReferenceDescriptor { /** * add a foreign key field ID */ public void addForeignKeyField ( int newId ) { } }
if ( m_ForeignKeyFields == null ) { m_ForeignKeyFields = new Vector ( ) ; } m_ForeignKeyFields . add ( new Integer ( newId ) ) ;
public class BatchReadResult { /** * A list of all the responses for each batch read . * @ param responses * A list of all the responses for each batch read . */ public void setResponses ( java . util . Collection < BatchReadOperationResponse > responses ) { } }
if ( responses == null ) { this . responses = null ; return ; } this . responses = new java . util . ArrayList < BatchReadOperationResponse > ( responses ) ;
public class DeleteBGPPeerRequestMarshaller {
    /**
     * Marshall the given parameter object: writes each request field through the
     * protocol marshaller using its pre-declared binding.
     *
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(DeleteBGPPeerRequest deleteBGPPeerRequest, ProtocolMarshaller protocolMarshaller) {
        if (deleteBGPPeerRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(deleteBGPPeerRequest.getVirtualInterfaceId(), VIRTUALINTERFACEID_BINDING);
            protocolMarshaller.marshall(deleteBGPPeerRequest.getAsn(), ASN_BINDING);
            protocolMarshaller.marshall(deleteBGPPeerRequest.getCustomerAddress(), CUSTOMERADDRESS_BINDING);
            protocolMarshaller.marshall(deleteBGPPeerRequest.getBgpPeerId(), BGPPEERID_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Element { /** * Returns the element symbol of this element . * @ return The element symbol of this element . Null if unset . * @ see # setSymbol */ @ Override public String getSymbol ( ) { } }
if ( atomicNumber == null ) return null ; if ( atomicNumber == 0 ) return "R" ; return Elements . ofNumber ( atomicNumber ) . symbol ( ) ;
public class route {
    /**
     * Use this API to update route.
     * Copies the updatable properties from {@code resource} into a fresh request
     * object and issues the update against the NITRO service.
     */
    public static base_response update(nitro_service client, route resource) throws Exception {
        // Build a request containing only the fields relevant to an update.
        route updateresource = new route();
        updateresource.network = resource.network;
        updateresource.netmask = resource.netmask;
        updateresource.gateway = resource.gateway;
        updateresource.td = resource.td;
        updateresource.distance = resource.distance;
        updateresource.cost1 = resource.cost1;
        updateresource.weight = resource.weight;
        updateresource.advertise = resource.advertise;
        updateresource.protocol = resource.protocol;
        updateresource.msr = resource.msr;
        updateresource.monitor = resource.monitor;
        return updateresource.update_resource(client);
    }
}
public class UndecidedNode {
    /**
     * Size in bytes of this subtree's serialized form; also sets {@code _nodeType}
     * flag bits as a side effect. The result is cached in {@code _size}.
     */
    @Override
    public final int size() {
        if (_size != 0) return _size; // Cached size
        assert _nodeType == 0 : "unexpected node type: " + _nodeType;
        // Encode the split-equality kind (1/2/3) into node-type bits 2-3.
        if (_split._equal != 0) _nodeType |= _split._equal == 1 ? 4 : (_split._equal == 2 ? 8 : 12);
        // int res = 7; // 1B node type + flags, 2B colId, 4B float split val
        // 1B node type + flags, 2B colId, 4B split val/small group, or (2B offset + 4B size) + large group
        int res = _split._equal == 3 ? 9 + _split._bs.numBytes() : 7;
        // NA handling correction
        res++; // 1 byte for NA split dir
        if (_split._nasplit == DHistogram.NASplitDir.NAvsREST)
            res -= _split._equal == 3 ? 6 + _split._bs.numBytes() : 4; // don't need certain stuff
        // Left child: add its size; for non-leaf children also reserve a skip-size
        // field whose width (1-4 bytes) depends on the child's size.
        Node left = _tree.node(_nids[0]);
        int lsz = left.size();
        res += lsz;
        if (left instanceof LeafNode) _nodeType |= (byte) 48;
        else {
            int slen = lsz < 256 ? 0 : (lsz < 65535 ? 1 : (lsz < (1 << 24) ? 2 : 3));
            _nodeType |= slen; // Set the size-skip bits
            res += (slen + 1);
        }
        // Right child: only the leaf flag is recorded (no skip field needed —
        // the right subtree is the last thing serialized).
        Node right = _tree.node(_nids[1]);
        if (right instanceof LeafNode) _nodeType |= (byte) (48 << 2);
        res += right.size();
        // Sanity: both children cannot simultaneously carry all flag bits.
        assert (_nodeType & 0x33) != 51;
        assert res != 0;
        return (_size = res);
    }
}
public class MutableBigInteger {
    /**
     * Multiply the contents of two MutableBigInteger objects. The result is
     * placed into MutableBigInteger z. The contents of y are not changed.
     * Classic schoolbook (word-by-word) multiplication in base 2^32.
     */
    void multiply(MutableBigInteger y, MutableBigInteger z) {
        int xLen = intLen;
        int yLen = y.intLen;
        int newLen = xLen + yLen;
        // Put z into an appropriate state to receive product
        if (z.value.length < newLen)
            z.value = new int[newLen];
        z.offset = 0;
        z.intLen = newLen;
        // The first iteration is hoisted out of the loop to avoid extra add
        long carry = 0;
        for (int j = yLen - 1, k = yLen + xLen - 1; j >= 0; j--, k--) {
            // LONG_MASK zero-extends each 32-bit word into an unsigned 64-bit value.
            long product = (y.value[j + y.offset] & LONG_MASK)
                    * (value[xLen - 1 + offset] & LONG_MASK) + carry;
            z.value[k] = (int) product;
            carry = product >>> 32;
        }
        z.value[xLen - 1] = (int) carry;
        // Perform the multiplication word by word
        for (int i = xLen - 2; i >= 0; i--) {
            carry = 0;
            for (int j = yLen - 1, k = yLen + i; j >= 0; j--, k--) {
                // Accumulate into the partial product already stored in z.
                long product = (y.value[j + y.offset] & LONG_MASK)
                        * (value[i + offset] & LONG_MASK)
                        + (z.value[k] & LONG_MASK) + carry;
                z.value[k] = (int) product;
                carry = product >>> 32;
            }
            z.value[i] = (int) carry;
        }
        // Remove leading zeros from product
        z.normalize();
    }
}
public class WhiteboardDtoService {
    /**
     * Maps a default context (without whiteboard-service) to a ServletContextDTO,
     * copying service id, context path, name, attributes and init parameters.
     */
    private ServletContextDTO mapServletContext(Map.Entry<ServiceReference<ServletContext>, ServletContext> mapEntry) {
        final ServiceReference<ServletContext> ref = mapEntry.getKey();
        final ServletContext servletContext = mapEntry.getValue();
        ServletContextDTO dto = new ServletContextDTO();
        dto.serviceId = (long) ref.getProperty(Constants.SERVICE_ID);
        // the actual ServletContext might use "" instead of "/" (depends on the
        // container). DTO must use "/" for root
        dto.contextPath = servletContext.getContextPath().trim().length() == 0
                ? "/" : servletContext.getContextPath();
        dto.name = servletContext.getServletContextName();
        // Snapshot all attributes into a plain map.
        dto.attributes = Collections.list(servletContext.getAttributeNames()).stream()
                .map(name -> new SimpleEntry<>(name, servletContext.getAttribute(name)))
                .collect(Collectors.toMap(SimpleEntry::getKey, SimpleEntry::getValue));
        // Snapshot all init parameters into a plain map.
        dto.initParams = Collections.list(servletContext.getInitParameterNames()).stream()
                .map(name -> new SimpleEntry<>(name, servletContext.getInitParameter(name)))
                .collect(Collectors.toMap(SimpleEntry::getKey, SimpleEntry::getValue));
        return dto;
    }
}
public class FactoryFeatureExtractor {
    /**
     * Standard non-max feature extractor.
     *
     * @param config Configuration for the extractor; null selects all defaults.
     * @return A feature extractor.
     */
    public static NonMaxSuppression nonmax(@Nullable ConfigExtract config) {
        if (config == null)
            config = new ConfigExtract();
        config.checkValidity();
        // Test/override hook: a registered factory replaces the default construction.
        if (BOverrideFactoryFeatureExtractor.nonmax != null) {
            return BOverrideFactoryFeatureExtractor.nonmax.process(config);
        }
        // Select the search strategy from the (strict|relaxed) x (max|min|minmax) matrix.
        NonMaxBlock.Search search;
        if (config.useStrictRule) {
            if (config.detectMaximums)
                if (config.detectMinimums)
                    search = new NonMaxBlockSearchStrict.MinMax();
                else
                    search = new NonMaxBlockSearchStrict.Max();
            else
                search = new NonMaxBlockSearchStrict.Min();
        } else {
            if (config.detectMaximums)
                if (config.detectMinimums)
                    search = new NonMaxBlockSearchRelaxed.MinMax();
                else
                    search = new NonMaxBlockSearchRelaxed.Max();
            else
                search = new NonMaxBlockSearchRelaxed.Min();
        }
        // See if the user wants to use threaded code or not
        NonMaxBlock alg = BoofConcurrency.USE_CONCURRENT ? new NonMaxBlock_MT(search) : new NonMaxBlock(search);
        alg.setSearchRadius(config.radius);
        // Same threshold magnitude for both extrema; minima use the negated value.
        alg.setThresholdMax(config.threshold);
        alg.setThresholdMin(-config.threshold);
        alg.setBorder(config.ignoreBorder);
        return new WrapperNonMaximumBlock(alg);
    }
}
public class FunctionAnnotation {
    /**
     * Reads the constant-field annotations from a single-input UDF and converts them
     * into {@link SingleInputSemanticProperties}. ConstantFieldsExcept is mutually
     * exclusive with ConstantFields / AllFieldsConstants.
     */
    public static SingleInputSemanticProperties readSingleConstantAnnotations(UserCodeWrapper<?> udf) {
        // get constantSet annotation from stub
        AllFieldsConstants allConstants = udf.getUserCodeAnnotation(AllFieldsConstants.class);
        ConstantFields constantSet = udf.getUserCodeAnnotation(ConstantFields.class);
        ConstantFieldsExcept notConstantSet = udf.getUserCodeAnnotation(ConstantFieldsExcept.class);
        if (notConstantSet != null && (constantSet != null || allConstants != null)) {
            throw new RuntimeException("Either ConstantFields or ConstantFieldsExcept can be specified, not both.");
        }
        // extract notConstantSet from annotation
        if (notConstantSet != null) {
            FieldSet nonConstant = new FieldSet(notConstantSet.value());
            return new ImplicitlyForwardingSingleInputSemanticProperties(nonConstant);
        }
        // all fields constant: an empty "non-constant" set means everything forwards
        if (allConstants != null) {
            FieldSet nonConstant = new FieldSet();
            return new ImplicitlyForwardingSingleInputSemanticProperties(nonConstant);
        }
        SingleInputSemanticProperties semanticProperties = new SingleInputSemanticProperties();
        // extract constantSet from annotation: each listed field forwards to itself
        if (constantSet != null) {
            for (int value : constantSet.value()) {
                semanticProperties.addForwardedField(value, value);
            }
        }
        return semanticProperties;
    }
}
public class FNDImpl {
    /**
     * Sets the maximum point size, emitting an EMF {@link Notification#SET}
     * notification when adapters are attached.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     * @param newMaxPtSize the new maximum point size; may be null
     */
    public void setMaxPtSize(Integer newMaxPtSize) {
        Integer oldMaxPtSize = maxPtSize;
        maxPtSize = newMaxPtSize;
        // Only construct the notification when someone is actually listening.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.FND__MAX_PT_SIZE, oldMaxPtSize, maxPtSize));
    }
}
public class ProjectCalendar { /** * Utility method to clear cached calendar data . */ private void clearWorkingDateCache ( ) { } }
m_workingDateCache . clear ( ) ; m_startTimeCache . clear ( ) ; m_getDateLastResult = null ; for ( ProjectCalendar calendar : m_derivedCalendars ) { calendar . clearWorkingDateCache ( ) ; }
public class DefaultLogbackConfigurator { /** * { @ inheritDoc } */ @ Override public void addError ( String msg , Throwable ex ) { } }
context . getStatusManager ( ) . add ( new ErrorStatus ( msg , context , ex ) ) ;
public class AppEngineGetList { /** * Qualifies entities to be retrieved with the specified { @ code Filter } s . * Each of the specified filters is combined with " logical and " . * @ param filters { @ code Filter } s to qualify entities to be retrieved . * @ return The { @ code GetList } which the execution result is * qualified with the specified { @ code Filter } s . */ @ Override public AppEngineGetList < E > filter ( Filter < ? > ... filters ) { } }
if ( filters == null ) { throw new IllegalArgumentException ( "'filters' must not be [" + null + "]" ) ; } this . filters = Arrays . asList ( filters ) ; return this ;
public class Ifc2x3tc1PackageImpl {
    /**
     * Lazily resolves the {@code IfcPowerMeasure} EClass from the registered
     * Ifc2x3tc1 EPackage.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     * @return the {@code IfcPowerMeasure} classifier
     */
    public EClass getIfcPowerMeasure() {
        if (ifcPowerMeasureEClass == null) {
            // Classifier index 724 is fixed by the generated package layout.
            ifcPowerMeasureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(724);
        }
        return ifcPowerMeasureEClass;
    }
}
public class ContentsIdExtension { /** * Remove all trace of the extension */ public void removeExtension ( ) { } }
try { if ( contentsIdDao . isTableExists ( ) ) { geoPackage . dropTable ( contentsIdDao . getTableName ( ) ) ; } if ( extensionsDao . isTableExists ( ) ) { extensionsDao . deleteByExtension ( EXTENSION_NAME ) ; } } catch ( SQLException e ) { throw new GeoPackageException ( "Failed to delete Contents Id extension and table. GeoPackage: " + geoPackage . getName ( ) , e ) ; }
public class JavaParser {
    /**
     * ANTLR-generated rule: parses a generic type parameter.
     * src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:309:1:
     * typeParameter : Identifier ('extends' bound)? ;
     *
     * @throws RecognitionException on a syntax error (reported and recovered from)
     */
    public final void typeParameter() throws RecognitionException {
        int typeParameter_StartIndex = input.index();
        try {
            // Memoization: skip the rule entirely if it was already parsed here while backtracking.
            if (state.backtracking > 0 && alreadyParsedRule(input, 9)) {
                return;
            }
            // Java.g:310:5: ( Identifier ('extends' bound)? )
            // Java.g:310:7: Identifier ('extends' bound)?
            {
                match(input, Identifier, FOLLOW_Identifier_in_typeParameter363);
                if (state.failed) return;
                // Java.g:310:18: ('extends' bound)?  — optional upper bound
                int alt16 = 2;
                int LA16_0 = input.LA(1);
                // Token type 81 is the 'extends' keyword.
                if ((LA16_0 == 81)) {
                    alt16 = 1;
                }
                switch (alt16) {
                    case 1:
                        // Java.g:310:19: 'extends' bound
                        {
                            match(input, 81, FOLLOW_81_in_typeParameter366);
                            if (state.failed) return;
                            pushFollow(FOLLOW_bound_in_typeParameter368);
                            bound();
                            state._fsp--;
                            if (state.failed) return;
                        }
                        break;
                }
            }
        } catch (RecognitionException re) {
            reportError(re);
            recover(input, re);
        } finally {
            // do for sure before leaving: record the parse result for memoization
            if (state.backtracking > 0) {
                memoize(input, 9, typeParameter_StartIndex);
            }
        }
    }
}
public class BufsConsumerGzipInflater {
    /**
     * Skips optional gzip header members indicated by the FLG byte.
     *
     * <p>NOTE(review): RFC 1952 allows FEXTRA, FNAME, FCOMMENT and FHCRC to be set
     * simultaneously, yet the {@code else if} chain handles at most one member per
     * invocation — presumably the skip* helpers re-enter this state machine for the
     * remaining flags; confirm against the decoder's state transitions.
     *
     * @param flag the FLG byte from the gzip header
     */
    private void skipHeaders(int flag) {
        // trying to skip optional gzip file members if any is present
        if ((flag & FEXTRA) != 0) {
            skipExtra(flag);
        } else if ((flag & FNAME) != 0) {
            skipTerminatorByte(flag, FNAME);
        } else if ((flag & FCOMMENT) != 0) {
            skipTerminatorByte(flag, FCOMMENT);
        } else if ((flag & FHCRC) != 0) {
            skipCRC16(flag);
        }
    }
}
public class Constraints { /** * Returns a constrained view of the specified collection , using the specified * constraint . Any operations that add new elements to the collection will * call the provided constraint . However , this method does not verify that * existing elements satisfy the constraint . * < p > The returned collection is not serializable . * @ param collection the collection to constrain * @ param constraint the constraint that validates added elements * @ return a constrained view of the collection */ public static < E > Collection < E > constrainedCollection ( Collection < E > collection , Constraint < ? super E > constraint ) { } }
return new ConstrainedCollection < E > ( collection , constraint ) ;
public class DateTimeParseContext {
    /**
     * Ends the parsing of an optional segment of the input.
     *
     * <p>The last element of {@code parsed} is the working copy pushed when the
     * optional segment started; the element before it is the pre-optional snapshot.
     * On success the snapshot (index size-2) is discarded, keeping the newly parsed
     * state; on failure the failed attempt (index size-1) is discarded, reverting
     * to the snapshot.
     *
     * @param successful whether the optional segment was successfully parsed
     */
    void endOptional(boolean successful) {
        if (successful) {
            parsed.remove(parsed.size() - 2);
        } else {
            parsed.remove(parsed.size() - 1);
        }
    }
}
public class ProxyBuilderDefaultImpl {
    /**
     * Sets the messaging QoS for the proxy, clamping the TTL down to the configured
     * maximum when the caller requested more.
     *
     * <p>NOTE(review): the check reads {@code getRoundTripTtl_ms()} but the clamp is
     * written via {@code setTtl_ms(...)} — confirm both accessors refer to the same
     * underlying TTL field in {@link MessagingQos}.
     *
     * (non-Javadoc)
     * @see io.joynr.proxy.ProxyBuilder#setMessagingQos(io.joynr.messaging.MessagingQos)
     */
    @Override
    public ProxyBuilder<T> setMessagingQos(final MessagingQos messagingQos) {
        if (messagingQos.getRoundTripTtl_ms() > maxMessagingTtl) {
            logger.warn("Error in MessageQos. domains: {} interface: {} Max allowed ttl: {}. Passed ttl: {}",
                    domains, interfaceName, maxMessagingTtl, messagingQos.getRoundTripTtl_ms());
            messagingQos.setTtl_ms(maxMessagingTtl);
        }
        this.messagingQos = messagingQos;
        return this;
    }
}
public class CmsAccountsToolHandler { /** * Returns the visibility flag module parameter value . < p > * @ return the visibility flag module parameter value */ protected String getVisibilityFlag ( ) { } }
CmsModule module = OpenCms . getModuleManager ( ) . getModule ( this . getClass ( ) . getPackage ( ) . getName ( ) ) ; if ( module == null ) { return VISIBILITY_ALL ; } return module . getParameter ( PARAM_VISIBILITY_FLAG , VISIBILITY_ALL ) ;
public class FeatureWriter {
    /**
     * Builds the qualifier list for a feature. For source features, /organism,
     * /mol_type and /db_xref="taxon:..." qualifiers are regenerated from the entry
     * and feature data; any pre-existing qualifiers of those kinds are dropped so
     * the generated values take precedence.
     *
     * @param entry   the entry providing the sequence molecule type
     * @param feature the feature whose qualifiers are collected
     * @return the combined qualifier list
     */
    public static Vector<Qualifier> getFeatureQualifiers(Entry entry, Feature feature) {
        Vector<Qualifier> qualifiers = new Vector<Qualifier>();
        if (feature instanceof SourceFeature) {
            // /organism from the source feature's scientific name
            String scientificName = ((SourceFeature) feature).getScientificName();
            if (!FlatFileUtils.isBlankString(scientificName)) {
                Qualifier qualifier = (new QualifierFactory()).createQualifier("organism", scientificName);
                qualifiers.add(qualifier);
            }
            // /mol_type from the entry's sequence
            String moleculeType = entry.getSequence().getMoleculeType();
            if (!FlatFileUtils.isBlankString(moleculeType)) {
                Qualifier qualifier = (new QualifierFactory()).createQualifier("mol_type", moleculeType);
                qualifiers.add(qualifier);
            }
            // /db_xref="taxon:<id>" from the source feature's taxonomy id
            Long taxId = ((SourceFeature) feature).getTaxId();
            if (taxId != null && taxId > -1 /* do not show negative taxIds */) {
                Qualifier qualifier = (new QualifierFactory()).createQualifier("db_xref", "taxon:" + taxId.toString());
                qualifiers.add(qualifier);
            }
        }
        // Copy the feature's remaining qualifiers, skipping any that would duplicate
        // the generated ones above.
        for (Qualifier qualifier : feature.getQualifiers()) {
            String name = qualifier.getName();
            String value = qualifier.getValue();
            if (name == null) {
                continue;
            }
            if (name.equals(Qualifier.ORGANISM_QUALIFIER_NAME)) {
                continue; // Ignore /organism qualifiers.
            }
            if (name.equals(Qualifier.MOL_TYPE_QUALIFIER_NAME)) {
                continue; // Ignore /mol_type qualifiers.
            }
            if (name.equals(Qualifier.DB_XREF_QUALIFIER_NAME) && value != null && value.startsWith("taxon:")) {
                continue; // Ignore /db_xref="taxon:" qualifiers.
            }
            qualifiers.add(qualifier);
        }
        return qualifiers;
    }
}
public class LinearSolverQrHouseCol_DDRM { /** * Solves for X using the QR decomposition . * @ param B A matrix that is n by m . Not modified . * @ param X An n by m matrix where the solution is written to . Modified . */ @ Override public void solve ( DMatrixRMaj B , DMatrixRMaj X ) { } }
if ( B . numRows != numRows ) throw new IllegalArgumentException ( "Unexpected dimensions for X: X rows = " + X . numRows + " expected = " + numRows ) ; X . reshape ( numCols , B . numCols ) ; int BnumCols = B . numCols ; // solve each column one by one for ( int colB = 0 ; colB < BnumCols ; colB ++ ) { // make a copy of this column in the vector for ( int i = 0 ; i < numRows ; i ++ ) { a . data [ i ] = B . data [ i * BnumCols + colB ] ; } // Solve Qa = b // a = Q ' b // a = Q _ { n - 1 } . . . Q _ 2 * Q _ 1 * b // Q _ n * b = ( I - gamma * u * u ^ T ) * b = b - u * ( gamma * U ^ T * b ) for ( int n = 0 ; n < numCols ; n ++ ) { double [ ] u = QR [ n ] ; double vv = u [ n ] ; u [ n ] = 1 ; QrHelperFunctions_DDRM . rank1UpdateMultR ( a , u , gammas [ n ] , 0 , n , numRows , temp . data ) ; u [ n ] = vv ; } // solve for Rx = b using the standard upper triangular solver TriangularSolver_DDRM . solveU ( R . data , a . data , numCols ) ; // save the results for ( int i = 0 ; i < numCols ; i ++ ) { X . data [ i * X . numCols + colB ] = a . data [ i ] ; } }
public class RandomUtil { /** * Picks a random object from the supplied List . The specified skip object will be skipped when * selecting a random value . The skipped object must exist exactly once in the List . * @ return a randomly selected item . */ public static < T > T pickRandom ( List < T > values , T skip ) { } }
return pickRandom ( values , skip , rand ) ;
public class TangoCacheManager { /** * Remove polling of a command * @ param command * @ throws DevFailed */ public synchronized void removeCommandPolling ( final CommandImpl command ) throws DevFailed { } }
if ( commandCacheMap . containsKey ( command ) ) { final CommandCache cache = commandCacheMap . get ( command ) ; cache . stopRefresh ( ) ; commandCacheMap . remove ( command ) ; } else if ( extTrigCommandCacheMap . containsKey ( command ) ) { extTrigCommandCacheMap . remove ( command ) ; } else if ( command . getName ( ) . equalsIgnoreCase ( DeviceImpl . STATE_NAME ) && stateCache != null ) { stateCache . stopRefresh ( ) ; stateCache = null ; } else if ( command . getName ( ) . equalsIgnoreCase ( DeviceImpl . STATUS_NAME ) && statusCache != null ) { statusCache . stopRefresh ( ) ; statusCache = null ; }
public class Generator { /** * Gets or creates a { @ link LibBinder } object for the given * type library . */ private LibBinder getTypeLibInfo ( IWTypeLib p ) throws BindingException { } }
LibBinder tli = typeLibs . get ( p ) ; if ( tli == null ) { typeLibs . put ( p , tli = new LibBinder ( p ) ) ; } return tli ;
public class RealtimeTuningConfig {
    /**
     * Creates a tuning config populated entirely with the class defaults.
     * Might make sense for this to be a builder.
     *
     * @param basePersistDirectory directory for intermediate persists; when null a
     *                             fresh temporary directory is created
     * @return a default-valued {@link RealtimeTuningConfig}
     */
    public static RealtimeTuningConfig makeDefaultTuningConfig(final @Nullable File basePersistDirectory) {
        // NOTE: arguments are positional; keep them aligned with the constructor.
        // The literal 0L/true/0/0/null values map to constructor parameters not
        // named here — TODO confirm their meaning against the constructor signature.
        return new RealtimeTuningConfig(defaultMaxRowsInMemory, 0L, defaultIntermediatePersistPeriod, defaultWindowPeriod, basePersistDirectory == null ? createNewBasePersistDirectory() : basePersistDirectory, defaultVersioningPolicy, defaultRejectionPolicyFactory, defaultMaxPendingPersists, defaultShardSpec, defaultIndexSpec, true, 0, 0, defaultReportParseExceptions, defaultHandoffConditionTimeout, defaultAlertTimeout, null, defaultDedupColumn);
    }
}
public class DuplicationTaskProcessor {
    /**
     * Deletes a content item in the destination space, but only if it does not
     * exist in the source manifest: if the item is still listed there, the source
     * appears to be missing content and the deletion is refused.
     *
     * @param spaceId   destination space id
     * @param contentId content item to delete
     * @throws TaskExecutionFailedException if the item still exists in the source
     *         manifest, or if the destination delete fails after retries
     */
    private void duplicateDeletion(final String spaceId, final String contentId) throws TaskExecutionFailedException {
        // Safety check: refuse the delete when the source manifest still lists the item.
        if (existsInSourceManifest(spaceId, contentId)) {
            throw new TaskExecutionFailedException(MessageFormat.format("item exists in source manifest and thus appears to be " + "missing content. account={0}, storeId={1}, spaceId={2}, contentId={3}", this.dupTask.getAccount(), this.dupTask.getSourceStoreId(), spaceId, contentId));
        }
        log.info("Duplicating deletion of " + contentId + " in dest space " + spaceId + " in account " + dupTask.getAccount());
        try {
            // Retry the destination delete; Retrier rethrows once attempts are exhausted.
            new Retrier().execute(new Retriable() {
                @Override
                public String retry() throws Exception {
                    // Delete content
                    destStore.deleteContent(spaceId, contentId);
                    return "success";
                }
            });
        } catch (Exception e) {
            String msg = "Error attempting to delete content : " + e.getMessage();
            throw new DuplicationTaskExecutionFailedException(buildFailureMessage(msg), e);
        }
        log.info("Successfully deleted content item (content_id=" + contentId + ") in dest space (space_id=" + spaceId + ") where account_id=" + dupTask.getAccount());
    }
}
public class MagickUtil {
    /**
     * Converts an {@code MagickImage} to a {@code BufferedImage} which holds an CMYK ICC profile
     *
     * @param pImage the original {@code MagickImage}
     * @param pAlpha keep alpha channel
     * @return a new {@code BufferedImage}
     * @throws MagickException if an exception occurs during conversion
     * @see BufferedImage
     */
    private static BufferedImage cmykToBuffered(MagickImage pImage, boolean pAlpha) throws MagickException {
        Dimension size = pImage.getDimension();
        int length = size.width * size.height;
        // Retrieve the ICC profile embedded in the image
        ICC_Profile profile = ICC_Profile.getInstance(pImage.getColorProfile().getInfo());
        ColorSpace cs = new ICC_ColorSpace(profile);
        // One 8-bit band per color component, plus one for alpha when requested
        int bands = cs.getNumComponents() + (pAlpha ? 1 : 0);
        int[] bits = new int[bands];
        for (int i = 0; i < bands; i++) {
            bits[i] = 8;
        }
        ColorModel cm = pAlpha ? new ComponentColorModel(cs, bits, true, true, Transparency.TRANSLUCENT, DataBuffer.TYPE_BYTE) : new ComponentColorModel(cs, bits, false, false, Transparency.OPAQUE, DataBuffer.TYPE_BYTE);
        byte[] pixels = new byte[length * bands];
        // TODO: If we do multiple dispatches (one per line, typically), we could provide listener
        // feedback. But it's currently a lot slower than fetching all the pixels in one go.
        // TODO: handle more generic cases if profile is not CMYK
        // TODO: Test "ACMYK"
        pImage.dispatchImage(0, 0, size.width, size.height, pAlpha ? "ACMYK" : "CMYK", pixels);
        // Init databuffer with array, to avoid allocation of empty array
        DataBuffer buffer = new DataBufferByte(pixels, pixels.length);
        // TODO: build array from bands variable, here it just works for CMYK
        // The values has not been tested with an alpha picture actually...
        int[] bandOffsets = pAlpha ? new int[] { 0, 1, 2, 3, 4 } : new int[] { 0, 1, 2, 3 };
        WritableRaster raster = Raster.createInterleavedRaster(buffer, size.width, size.height, size.width * bands, bands, bandOffsets, LOCATION_UPPER_LEFT);
        return new BufferedImage(cm, raster, pAlpha, null);
    }
}
public class OriginField {
    /**
     * Reads the origin attribute fields from a line of text of the form
     * {@code o=<name> <sessionID> <sessionVersion> <networkType> <addressType> <address>}.
     *
     * <p>NOTE(review): {@code Text.trim()} is invoked without using any return
     * value, so it presumably trims in place — confirm against the Text class.
     *
     * @param line the text.
     * @throws ParseException if the line cannot be split into all six tokens
     */
    public void strain(Text line) throws ParseException {
        try {
            // Split off the "o=" prefix, then tokenize the value on spaces.
            Iterator<Text> it = line.split('=').iterator();
            it.next();
            Text token = it.next();
            it = token.split(' ').iterator();
            name = it.next();
            name.trim();
            sessionID = it.next();
            sessionID.trim();
            sessionVersion = it.next();
            sessionVersion.trim();
            networkType = it.next();
            networkType.trim();
            addressType = it.next();
            addressType.trim();
            address = it.next();
            address.trim();
        } catch (Exception e) {
            throw new ParseException("Could not parse origin", 0);
        }
    }
}
public class TaskRuntime { /** * Deliver a message to the Task . This calls into the user supplied message handler . * @ param message the message to be delivered . */ public void deliver ( final byte [ ] message ) { } }
synchronized ( this . heartBeatManager ) { if ( this . currentStatus . isNotRunning ( ) ) { LOG . log ( Level . WARNING , "Trying to send a message to a task that is in state: {0}. Ignoring." , this . currentStatus . getState ( ) ) ; } else { try { this . deliverMessageToTask ( message ) ; } catch ( final TaskMessageHandlerFailure taskMessageHandlerFailure ) { LOG . log ( Level . WARNING , "Exception while executing task close handler." , taskMessageHandlerFailure . getCause ( ) ) ; this . currentStatus . setException ( taskMessageHandlerFailure . getCause ( ) ) ; } } }
public class AbstractMultipleOptionsTablePanel { /** * { @ inheritDoc } * Overridden to also enable / disable the added buttons ( " enable all " and " disable all " ) . */ @ Override public void setComponentEnabled ( boolean enabled ) { } }
super . setComponentEnabled ( enabled ) ; boolean enable = enabled && getModel ( ) . getRowCount ( ) > 0 ; enableAllButton . setEnabled ( enable ) ; disableAllButton . setEnabled ( enable ) ;
public class BrowserSessionImpl {
    /**
     * Check if the session is closed. If the closed flag is set to true, throw an
     * exception. The flag is read under the instance lock so a concurrent close is
     * observed consistently.
     *
     * @throws SISessionUnavailableException thrown if the session is closed
     */
    void checkNotClosed() throws SISessionUnavailableException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "checkNotClosed");
        synchronized (this) {
            if (_closed) {
                // Trace the early exit before throwing so entry/exit pairs stay balanced.
                if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                    SibTr.exit(tc, "checkNotClosed", "Object closed");
                throw new SISessionUnavailableException(nls.getFormattedMessage("OBJECT_CLOSED_ERROR_CWSIP0081", new Object[] { _dest.getName(), _dest.getMessageProcessor().getMessagingEngineName() }, null));
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "checkNotClosed");
    }
}
public class LoggingSubsystemParser { /** * Reads the single { @ code name } attribute from an element . * @ param reader the reader to use * @ return the value of the { @ code name } attribute * @ throws XMLStreamException if the { @ code name } attribute is not present , there is more than one attribute on the * element or there is content within the element . */ static String readNameAttribute ( final XMLExtendedStreamReader reader ) throws XMLStreamException { } }
return readStringAttributeElement ( reader , Attribute . NAME . getLocalName ( ) ) ;
public class StopStrategies { /** * If the task executed time exceeds the specified time , the task will stop retry . * @ param time The max task executed time . * @ param timeUnit The time unit . * @ param < V > The return value type . * @ return The stop strategy predicate . */ public static < V > Predicate < TaskContext < V > > afterDelay ( long time , TimeUnit timeUnit ) { } }
return ctx -> ( System . currentTimeMillis ( ) - ctx . getStartTime ( ) ) >= timeUnit . toMillis ( time ) ;
public class TableMetaCache {
    /**
     * Parses the result of a {@code desc table} query into a list of column
     * metadata entries.
     *
     * @param packet the raw result set packet from the desc query
     * @return one {@link FieldMeta} per table column
     */
    public static List<FieldMeta> parseTableMetaByDesc(ResultSetPacket packet) {
        // Map each result column's original name to its positional index.
        Map<String, Integer> nameMaps = new HashMap<String, Integer>(6, 1f);
        int index = 0;
        for (FieldPacket fieldPacket : packet.getFieldDescriptors()) {
            nameMaps.put(fieldPacket.getOriginalName(), index++);
        }
        int size = packet.getFieldDescriptors().size();
        // Field values form a flat list: one desc-output row per `size` entries.
        int count = packet.getFieldValues().size() / packet.getFieldDescriptors().size();
        List<FieldMeta> result = new ArrayList<FieldMeta>();
        for (int i = 0; i < count; i++) {
            FieldMeta meta = new FieldMeta();
            // Optimization: String.intern() shares String objects to reduce memory usage.
            meta.setColumnName(packet.getFieldValues().get(nameMaps.get(COLUMN_NAME) + i * size).intern());
            meta.setColumnType(packet.getFieldValues().get(nameMaps.get(COLUMN_TYPE) + i * size));
            meta.setNullable(StringUtils.equalsIgnoreCase(packet.getFieldValues().get(nameMaps.get(IS_NULLABLE) + i * size), "YES"));
            meta.setKey("PRI".equalsIgnoreCase(packet.getFieldValues().get(nameMaps.get(COLUMN_KEY) + i * size)));
            meta.setUnique("UNI".equalsIgnoreCase(packet.getFieldValues().get(nameMaps.get(COLUMN_KEY) + i * size)));
            // Special handling: unescape quoted default values.
            meta.setDefaultValue(DruidDdlParser.unescapeQuotaName(packet.getFieldValues().get(nameMaps.get(COLUMN_DEFAULT) + i * size)));
            meta.setExtra(packet.getFieldValues().get(nameMaps.get(EXTRA) + i * size));
            result.add(meta);
        }
        return result;
    }
}
public class ClustersInner { /** * Stops a Kusto cluster . * @ param resourceGroupName The name of the resource group containing the Kusto cluster . * @ param clusterName The name of the Kusto cluster . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < Void > beginStopAsync ( String resourceGroupName , String clusterName , final ServiceCallback < Void > serviceCallback ) { } }
return ServiceFuture . fromResponse ( beginStopWithServiceResponseAsync ( resourceGroupName , clusterName ) , serviceCallback ) ;
public class ClassFinder {
    /**
     * Scans class path and source path for files in given package.
     *
     * @param p                 the package to scan
     * @param includeSourcePath whether the source path participates in the scan
     * @throws IOException if listing a location fails
     */
    private void scanUserPaths(PackageSymbol p, boolean includeSourcePath) throws IOException {
        Set<JavaFileObject.Kind> kinds = getPackageFileKinds();
        // Split the requested kinds into class-file and source-file subsets.
        Set<JavaFileObject.Kind> classKinds = EnumSet.copyOf(kinds);
        classKinds.remove(JavaFileObject.Kind.SOURCE);
        boolean wantClassFiles = !classKinds.isEmpty();
        Set<JavaFileObject.Kind> sourceKinds = EnumSet.copyOf(kinds);
        sourceKinds.remove(JavaFileObject.Kind.CLASS);
        boolean wantSourceFiles = !sourceKinds.isEmpty();
        boolean haveSourcePath = includeSourcePath && fileManager.hasLocation(SOURCE_PATH);
        // Verbose-path diagnostics only; no effect on the scan itself.
        if (verbose && verbosePath) {
            if (fileManager instanceof StandardJavaFileManager) {
                StandardJavaFileManager fm = (StandardJavaFileManager) fileManager;
                if (haveSourcePath && wantSourceFiles) {
                    List<Path> path = List.nil();
                    for (Path sourcePath : fm.getLocationAsPaths(SOURCE_PATH)) {
                        path = path.prepend(sourcePath);
                    }
                    log.printVerbose("sourcepath", path.reverse().toString());
                } else if (wantSourceFiles) {
                    // No dedicated source path: sources are found on the class path.
                    List<Path> path = List.nil();
                    for (Path classPath : fm.getLocationAsPaths(CLASS_PATH)) {
                        path = path.prepend(classPath);
                    }
                    log.printVerbose("sourcepath", path.reverse().toString());
                }
                if (wantClassFiles) {
                    List<Path> path = List.nil();
                    for (Path platformPath : fm.getLocationAsPaths(PLATFORM_CLASS_PATH)) {
                        path = path.prepend(platformPath);
                    }
                    for (Path classPath : fm.getLocationAsPaths(CLASS_PATH)) {
                        path = path.prepend(classPath);
                    }
                    log.printVerbose("classpath", path.reverse().toString());
                }
            }
        }
        String packageName = p.fullname.toString();
        if (wantSourceFiles && !haveSourcePath) {
            // One combined class-path scan covers both source and class kinds.
            fillIn(p, CLASS_PATH, list(CLASS_PATH, p, packageName, kinds));
        } else {
            if (wantClassFiles)
                fillIn(p, CLASS_PATH, list(CLASS_PATH, p, packageName, classKinds));
            if (wantSourceFiles)
                fillIn(p, SOURCE_PATH, list(SOURCE_PATH, p, packageName, sourceKinds));
        }
    }
}
public class ClusterManagerClient { /** * Sets the maintenance policy for a cluster . * < p > Sample code : * < pre > < code > * try ( ClusterManagerClient clusterManagerClient = ClusterManagerClient . create ( ) ) { * String projectId = " " ; * String zone = " " ; * String clusterId = " " ; * MaintenancePolicy maintenancePolicy = MaintenancePolicy . newBuilder ( ) . build ( ) ; * Operation response = clusterManagerClient . setMaintenancePolicy ( projectId , zone , clusterId , maintenancePolicy ) ; * < / code > < / pre > * @ param projectId The Google Developers Console [ project ID or project * number ] ( https : / / support . google . com / cloud / answer / 6158840 ) . * @ param zone The name of the Google Compute Engine [ zone ] ( / compute / docs / zones # available ) in * which the cluster resides . * @ param clusterId The name of the cluster to update . * @ param maintenancePolicy The maintenance policy to be set for the cluster . An empty field * clears the existing maintenance policy . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final Operation setMaintenancePolicy ( String projectId , String zone , String clusterId , MaintenancePolicy maintenancePolicy ) { } }
SetMaintenancePolicyRequest request = SetMaintenancePolicyRequest . newBuilder ( ) . setProjectId ( projectId ) . setZone ( zone ) . setClusterId ( clusterId ) . setMaintenancePolicy ( maintenancePolicy ) . build ( ) ; return setMaintenancePolicy ( request ) ;
public class XClosureImpl {
    /**
     * Resets the given structural feature to its default value (EMF eUnset
     * dispatch).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case XbasePackage.XCLOSURE__DECLARED_FORMAL_PARAMETERS:
                getDeclaredFormalParameters().clear();
                return;
            case XbasePackage.XCLOSURE__EXPRESSION:
                setExpression((XExpression) null);
                return;
            case XbasePackage.XCLOSURE__EXPLICIT_SYNTAX:
                setExplicitSyntax(EXPLICIT_SYNTAX_EDEFAULT);
                return;
            case XbasePackage.XCLOSURE__IMPLICIT_FORMAL_PARAMETERS:
                getImplicitFormalParameters().clear();
                return;
        }
        // Feature not declared on XClosure: delegate to the superclass.
        super.eUnset(featureID);
    }
}
public class JSR154Filter {
    /**
     * Wraps the request/response when unwrapped dispatch or request-attribute
     * listeners require it, fires request-lifecycle events the first time a request
     * enters this context, applies any pending dispatch wrappers, and finally
     * invokes the rest of the filter chain, undoing all of the above on the way out.
     *
     * @param request  the incoming servlet request
     * @param response the outgoing servlet response
     * @param chain    the remaining filter chain
     */
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
        HttpServletRequest srequest = (HttpServletRequest) request;
        HttpServletResponse sresponse = (HttpServletResponse) response;
        Request requestWrapper = null;
        Response responseWrapper = null;
        boolean root_filter = false;
        // Do we need a root wrapper?
        ThreadState state = state();
        if (_unwrappedDispatchSupported || LazyList.size(_requestAttributeListeners) > 0) {
            if (srequest instanceof ServletHttpRequest) {
                // First wrapping for this thread: install fresh root wrappers.
                request = state.rootRequest = requestWrapper = new Request(srequest);
                response = state.rootResponse = responseWrapper = new Response(sresponse);
                root_filter = true;
            } else {
                // Already wrapped further up the chain: reuse the thread's root wrappers.
                requestWrapper = state.rootRequest;
                responseWrapper = state.rootResponse;
            }
        }
        // Is this the first time this request has been in this _context?
        // True for the root wrapping, or for a cross-context dispatch into here.
        boolean first_in_context = root_filter || requestWrapper != null && requestWrapper.getRequest() != null && requestWrapper.getRequest() instanceof Dispatcher.DispatcherRequest && ((Dispatcher.DispatcherRequest) requestWrapper.getRequest()).crossContext();
        if (first_in_context) {
            requestInitialized(request);
            if (requestWrapper != null && LazyList.size(_requestAttributeListeners) > 0)
                requestWrapper.addContextFilter(this);
        }
        // setup dispatch: apply (and consume) any wrappers staged by a dispatcher.
        boolean dispatch = false;
        if (_unwrappedDispatchSupported && state.dispatchRequest != null) {
            dispatch = true;
            requestWrapper.pushWrapper(state.dispatchRequest);
            responseWrapper.pushWrapper(state.dispatchResponse);
            state.dispatchRequest = null;
            state.dispatchResponse = null;
        }
        try {
            chain.doFilter(request, response);
        } finally {
            // Tear down in reverse: lifecycle events, thread state, dispatch wrappers.
            if (first_in_context) {
                requestDestroyed(request);
                if (requestWrapper != null && LazyList.size(_requestAttributeListeners) > 0)
                    requestWrapper.delContextFilter(this);
            }
            if (root_filter)
                state.clear();
            if (dispatch) {
                requestWrapper.popWrapper();
                responseWrapper.popWrapper();
            }
        }
    }
}
public class CommandFaceDescriptor { /** * Configures the given button and command using the given configurer and the information * contained in this instance . * @ param button The button to be configured . Must not be null . * @ param command The command to be configured . May be null . * @ param configurer The configurer . Must not be null . * @ throws IllegalArgumentException if { @ code button } or { @ code configurer } are null . */ public void configure ( AbstractButton button , AbstractCommand command , CommandButtonConfigurer configurer ) { } }
Assert . notNull ( button , "button" ) ; Assert . notNull ( configurer , "configurer" ) ; configurer . configure ( button , command , this ) ;
public class GDominanceComparator { /** * Compares two solutions . * @ param solution1 Object representing the first < code > Solution < / code > . * @ param solution2 Object representing the second < code > Solution < / code > . * @ return - 1 , or 0 , or 1 if solution1 dominates solution2 , both are * non - dominated , or solution1 is dominated by solution2 , respectively . */ @ Override public int compare ( S solution1 , S solution2 ) { } }
if ( solution1 == null ) { throw new JMetalException ( "Solution1 is null" ) ; } else if ( solution2 == null ) { throw new JMetalException ( "Solution2 is null" ) ; } else if ( solution1 . getNumberOfObjectives ( ) != solution2 . getNumberOfObjectives ( ) ) { throw new JMetalException ( "Cannot compare because solution1 has " + solution1 . getNumberOfObjectives ( ) + " objectives and solution2 has " + solution2 . getNumberOfObjectives ( ) ) ; } int result = flagComparison ( solution1 , solution2 ) ; return result ;
public class AtlassianRepository {
    /**
     * Marks the specification at the given location as implemented via the remote
     * XML-RPC handler.
     *
     * <p>NOTE(review): the "Implemeted" typo in the method name comes from the
     * implemented interface and cannot be fixed locally without breaking callers.
     *
     * {@inheritDoc}
     *
     * @param location the specification location URI
     * @throws Exception if the remote call returns anything but the
     *         {@code <success>} marker (the returned text is used as the message)
     */
    public void setDocumentAsImplemeted(String location) throws Exception {
        Vector<?> args = CollectionUtil.toVector(username, password, args(URI.create(URIUtil.raw(location))));
        XmlRpcClient xmlrpc = getXmlRpcClient();
        String msg = (String) xmlrpc.execute(new XmlRpcRequest(handler + ".setSpecificationAsImplemented", args));
        // The remote side signals success with the literal "<success>" marker.
        if (!("<success>".equals(msg)))
            throw new Exception(msg);
    }
}
public class FctBnAccEntitiesProcessors {
    /**
     * <p>Lazy get PrcAccDocCogsRetrieve. The processor is created on first use,
     * wired with its collaborators, and only then published into the shared
     * processors map.</p>
     *
     * @param pAddParam additional param
     * @return requested PrcAccDocCogsRetrieve
     * @throws Exception - an exception
     */
    protected final PrcAccDocCogsRetrieve lazyGetPrcAccDocCogsRetrieve(final Map<String, Object> pAddParam) throws Exception {
        // Cache key is the processor's simple class name.
        @SuppressWarnings("unchecked")
        PrcAccDocCogsRetrieve<RS, IDocWarehouse> proc = (PrcAccDocCogsRetrieve<RS, IDocWarehouse>) this.processorsMap.get(PrcAccDocCogsRetrieve.class.getSimpleName());
        if (proc == null) {
            proc = new PrcAccDocCogsRetrieve<RS, IDocWarehouse>();
            proc.setSrvWarehouseEntry(getSrvWarehouseEntry());
            proc.setSrvCogsEntry(getSrvCogsEntry());
            @SuppressWarnings("unchecked")
            IEntityProcessor<IDocWarehouse, Long> delegate = (IEntityProcessor<IDocWarehouse, Long>) lazyGetPrcAccDocRetrieve(pAddParam);
            proc.setPrcAccDocRetrieve(delegate);
            // assigning fully initialized object:
            this.processorsMap.put(PrcAccDocCogsRetrieve.class.getSimpleName(), proc);
        }
        return proc;
    }
}
public class SingleThreadEventExecutor {
    /**
     * Take the next {@link Runnable} from the task queue and so will block if no task is currently present.
     * Be aware that this method will throw an {@link UnsupportedOperationException} if the task queue, which was
     * created via {@link #newTaskQueue()}, does not implement {@link BlockingQueue}.
     *
     * @return {@code null} if the executor thread has been interrupted or waken up.
     */
    protected Runnable takeTask() {
        assert inEventLoop();
        if (!(taskQueue instanceof BlockingQueue)) {
            throw new UnsupportedOperationException();
        }
        // Intentionally shadows the field with a BlockingQueue-typed view of it.
        BlockingQueue<Runnable> taskQueue = (BlockingQueue<Runnable>) this.taskQueue;
        for (;;) {
            ScheduledFutureTask<?> scheduledTask = peekScheduledTask();
            if (scheduledTask == null) {
                // No scheduled work pending: block indefinitely for the next task.
                Runnable task = null;
                try {
                    task = taskQueue.take();
                    if (task == WAKEUP_TASK) {
                        // Sentinel used only to unblock take(); not a real task.
                        task = null;
                    }
                } catch (InterruptedException e) {
                    // Ignore
                }
                return task;
            } else {
                long delayNanos = scheduledTask.delayNanos();
                Runnable task = null;
                if (delayNanos > 0) {
                    // Wait at most until the next scheduled task becomes due.
                    try {
                        task = taskQueue.poll(delayNanos, TimeUnit.NANOSECONDS);
                    } catch (InterruptedException e) {
                        // Waken up.
                        return null;
                    }
                }
                if (task == null) {
                    // We need to fetch the scheduled tasks now as otherwise there may be a chance that
                    // scheduled tasks are never executed if there is always one task in the taskQueue.
                    // This is for example true for the read task of OIO Transport
                    // See https://github.com/netty/netty/issues/1614
                    fetchFromScheduledTaskQueue();
                    task = taskQueue.poll();
                }
                if (task != null) {
                    return task;
                }
            }
        }
    }
}