signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class LocationHelper {

    /**
     * Works in a similar way to modulateCircularLocation but returns the number
     * of complete passes over a sequence length a circular location makes, i.e.
     * if we have a sequence of length 10 and the location 3..52 we make 4
     * complete passes through the genome to go from position 3 to position 52.
     *
     * @param index     end position of the circular location
     * @param seqLength length of the (circular) sequence
     * @return the number of complete passes; 0 while the index lies within the
     *         second traversal, -1 when the index does not exceed the sequence
     *         length at all
     */
    public static int completeCircularPasses(int index, int seqLength) {
        // Count how many whole sequence lengths have to be peeled off before
        // the index falls within a single pass, then discount the first
        // traversal (matching the documented 3..52 / length-10 example == 4).
        int passes = 0;
        for (int remaining = index; remaining > seqLength; remaining -= seqLength) {
            passes++;
        }
        return passes - 1;
    }
}
public class MaterializedCollectStreamResult {

    /**
     * Materializes a newly received row: evicts old rows first when the
     * retained window would exceed the configured maximum, then appends the
     * row and records its position.
     *
     * @param row the inserted row to materialize
     */
    private void processInsert(Row row) {
        // Evict before inserting so the number of valid rows never exceeds
        // maxRowCount.
        final boolean overCapacity =
                materializedTable.size() - validRowPosition >= maxRowCount;
        if (overCapacity) {
            cleanUp();
        }
        materializedTable.add(row);
        // Record the index the row landed at (size - 1 after the add).
        rowPositionCache.put(row, materializedTable.size() - 1);
    }
}
public class TargetHttpProxyClient { /** * Returns the specified TargetHttpProxy resource . Gets a list of available target HTTP proxies by * making a list ( ) request . * < p > Sample code : * < pre > < code > * try ( TargetHttpProxyClient targetHttpProxyClient = TargetHttpProxyClient . create ( ) ) { * ProjectGlobalTargetHttpProxyName targetHttpProxy = ProjectGlobalTargetHttpProxyName . of ( " [ PROJECT ] " , " [ TARGET _ HTTP _ PROXY ] " ) ; * TargetHttpProxy response = targetHttpProxyClient . getTargetHttpProxy ( targetHttpProxy . toString ( ) ) ; * < / code > < / pre > * @ param targetHttpProxy Name of the TargetHttpProxy resource to return . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final TargetHttpProxy getTargetHttpProxy ( String targetHttpProxy ) { } }
GetTargetHttpProxyHttpRequest request = GetTargetHttpProxyHttpRequest . newBuilder ( ) . setTargetHttpProxy ( targetHttpProxy ) . build ( ) ; return getTargetHttpProxy ( request ) ;
public class BitfinexApiCallbackListeners { /** * registers listener for orderbook events * @ param listener of event * @ return hook of this listener */ public Closeable onOrderbookEvent ( final BiConsumer < BitfinexOrderBookSymbol , Collection < BitfinexOrderBookEntry > > listener ) { } }
orderbookEntryConsumers . offer ( listener ) ; return ( ) -> orderbookEntryConsumers . remove ( listener ) ;
public class TwitterRiver { /** * Get users id of each list to stream them . * @ param tUserlists List of user list . Should be a public list . * @ return */ private long [ ] getUsersListMembers ( String [ ] tUserlists ) { } }
logger . debug ( "Fetching user id of given lists" ) ; List < Long > listUserIdToFollow = new ArrayList < Long > ( ) ; Configuration cb = buildTwitterConfiguration ( ) ; Twitter twitterImpl = new TwitterFactory ( cb ) . getInstance ( ) ; // For each list given in parameter for ( String listId : tUserlists ) { logger . debug ( "Adding users of list {} " , listId ) ; String [ ] splitListId = listId . split ( "/" ) ; try { long cursor = - 1 ; PagableResponseList < User > itUserListMembers ; do { itUserListMembers = twitterImpl . getUserListMembers ( splitListId [ 0 ] , splitListId [ 1 ] , cursor ) ; for ( User member : itUserListMembers ) { long userId = member . getId ( ) ; listUserIdToFollow . add ( userId ) ; } } while ( ( cursor = itUserListMembers . getNextCursor ( ) ) != 0 ) ; } catch ( TwitterException te ) { logger . error ( "Failed to get list members for : {}" , listId , te ) ; } } // Just casting from Long to long long ret [ ] = new long [ listUserIdToFollow . size ( ) ] ; int pos = 0 ; for ( Long userId : listUserIdToFollow ) { ret [ pos ] = userId ; pos ++ ; } return ret ;
public class IfcLightDistributionDataImpl {

    /**
     * Returns the string-valued luminous-intensity list of this light
     * distribution data via the reflective EMF accessor.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    public EList<String> getLuminousIntensityAsString() {
        // eGet with resolve=true follows the standard EMF reflective-access
        // contract for the LUMINOUS_INTENSITY_AS_STRING feature; the unchecked
        // cast narrows the raw EList returned by eGet.
        return (EList<String>) eGet(Ifc2x3tc1Package.Literals.IFC_LIGHT_DISTRIBUTION_DATA__LUMINOUS_INTENSITY_AS_STRING, true);
    }
}
public class IndexFieldTypePollerPeriodical { /** * Cancel the polling job for the given { @ link ScheduledFuture } . * @ param future polling job future */ private void cancel ( @ Nullable ScheduledFuture < ? > future ) { } }
if ( future != null && ! future . isCancelled ( ) ) { if ( ! future . cancel ( true ) ) { LOG . warn ( "Couldn't cancel field type update job" ) ; } }
public class ThreadPoolExecutor {

    /**
     * Tries to remove from the work queue all {@link Future} tasks that have
     * been cancelled. This method can be useful as a storage reclamation
     * operation, that has no other impact on functionality. Cancelled tasks
     * are never executed, but may accumulate in work queues until worker
     * threads can actively remove them. Invoking this method instead tries to
     * remove them now. However, this method may fail to remove tasks in the
     * presence of interference by other threads.
     */
    public void purge() {
        final BlockingQueue<Runnable> q = workQueue;
        try {
            // Fast path: a single iterator pass, removing cancelled futures
            // in place.
            Iterator<Runnable> it = q.iterator();
            while (it.hasNext()) {
                Runnable r = it.next();
                if (r instanceof Future<?> && ((Future<?>) r).isCancelled())
                    it.remove();
            }
        } catch (ConcurrentModificationException fallThrough) {
            // Take slow path if we encounter interference during traversal.
            // Make copy for traversal and call remove for cancelled entries.
            // The slow path is more likely to be O(N*N).
            for (Object r : q.toArray())
                if (r instanceof Future<?> && ((Future<?>) r).isCancelled())
                    q.remove(r);
        }
        tryTerminate(); // In case SHUTDOWN and now empty
    }
}
public class SQLParser { /** * Build a pattern segment to accept a single optional EXPORT or PARTITION clause * to modify CREATE STREAM statements . * @ param captureTokens Capture individual tokens if true * @ return Inner pattern to be wrapped by the caller as appropriate * Capture groups ( when captureTokens is true ) : * ( 1 ) EXPORT clause : target name * ( 2 ) PARTITION clause : column name */ private static SQLPatternPart makeInnerStreamModifierClausePattern ( boolean captureTokens ) { } }
return SPF . oneOf ( SPF . clause ( SPF . token ( "export" ) , SPF . token ( "to" ) , SPF . token ( "target" ) , SPF . group ( captureTokens , SPF . databaseObjectName ( ) ) ) , SPF . clause ( SPF . token ( "partition" ) , SPF . token ( "on" ) , SPF . token ( "column" ) , SPF . group ( captureTokens , SPF . databaseObjectName ( ) ) ) ) ;
public class RandomUtil { /** * 生成RGB随机数 * @ return */ public static int [ ] getRandomRgb ( ) { } }
int [ ] rgb = new int [ 3 ] ; for ( int i = 0 ; i < 3 ; i ++ ) { rgb [ i ] = random . nextInt ( 255 ) ; } return rgb ;
public class ArrayTrie {

    /**
     * Finds the best match for the given bytes, delegating to the node-based
     * overload starting at the root node (index 0).
     *
     * @param b      buffer containing the key bytes
     * @param offset index of the first byte to consider
     * @param len    number of bytes available for matching
     * @return the best-matching value, as defined by the four-argument overload
     */
    @Override
    public V getBest(byte[] b, int offset, int len) {
        return getBest(0, b, offset, len);
    }
}
public class SmsSendResponseDto {

    /**
     * Is successfully sent boolean.
     *
     * @return {@code true} if the SMS was reported as successfully sent
     */
    @XmlElement(name = "successfullySent", required = true)
    @JsonProperty(value = "successfullySent", required = true)
    public boolean isSuccessfullySent() {
        // Plain accessor; serialized under the "successfullySent" key for
        // both XML and JSON.
        return successfullySent;
    }
}
public class ContainerKeyCache {

    /**
     * Updates the tail cache for the given Table Segment with the contents of
     * the given {@link TableKeyBatch}. Each {@link TableKeyBatch.Item} is
     * updated only if no previous entry exists with its
     * {@link TableKeyBatch.Item#getHash()} or if its
     * {@link TableKeyBatch.Item#getOffset()} is greater than the existing
     * entry's offset. This method should be used for processing new updates to
     * the Index (as opposed from bulk-loading already indexed keys).
     *
     * @param segmentId   Segment Id that the {@link TableKeyBatch} Items belong to.
     * @param batch       An {@link TableKeyBatch} containing items to accept into the Cache.
     * @param batchOffset Offset in the Segment where the first item in the
     *                    {@link TableKeyBatch} has been written to.
     * @return A List of offsets for each item in the {@link TableKeyBatch}
     *         (in the same order) of where the latest value for that item's
     *         Key exists now.
     */
    List<Long> includeUpdateBatch(long segmentId, TableKeyBatch batch, long batchOffset) {
        SegmentKeyCache cache;
        int generation;
        synchronized (this.segmentCaches) {
            // Snapshot the generation and fetch-or-create the per-segment
            // cache under the same lock so the two stay consistent with each
            // other.
            generation = this.currentCacheGeneration;
            cache = this.segmentCaches.computeIfAbsent(segmentId, s -> new SegmentKeyCache(s, this.cache));
        }
        // The actual batch merge happens outside the container-wide lock.
        return cache.includeUpdateBatch(batch, batchOffset, generation);
    }
}
public class GBSTree {

    /**
     * Delete from right side with successor node present.
     *
     * @param p         current node; its right side may hold the delete point
     * @param l         successor node (presumably the leftmost node following
     *                  p — confirm against callers)
     * @param deleteKey the key being deleted
     * @param point     receives the located delete/target positions
     */
    private void rightDeleteWithSuccessor(GBSNode p, GBSNode l, Object deleteKey, DeleteNode point) {
        java.util.Comparator comp = p.deleteComparator();
        // Compare the delete key against the successor's smallest key to
        // decide which node actually holds it.
        int xcc = comp.compare(deleteKey, l.leftMostKey());
        if (xcc == 0) /* Target key IS low key of successor */ {
            /* Migrate up high key of current (delete key) into low key of successor (target key) */
            point.setDelete(p, p.rightMostIndex());
            point.setTarget(l, 0, DeleteNode.OVERLAY_LEFT);
        } else if (xcc < 0) /* Less than left most key of successor */ {
            /* Target is in right of current */
            int ix = p.findDeleteInRight(deleteKey);
            if (ix >= 0)
                point.setDelete(p, ix);
            // if ix < 0 the key was not found; point is left unchanged
        } else /* Greater than left most of successor */ {
            /* Target is in left of successor */
            int ix = l.findDeleteInLeft(deleteKey);
            if (ix >= 0) {
                /* Migrate up high key of current (delete key) into low key of successor (hole left after keys moved) */
                point.setDelete(p, p.rightMostIndex());
                point.setTarget(l, ix, DeleteNode.ADD_LEFT);
            }
        }
    }
}
public class GroupDiscussInterface { /** * Get info for a given topic * @ param topicId * Unique identifier of a topic for a given group { @ link Topic } . * @ return A group topic * @ throws FlickrException * @ see < a href = " http : / / www . flickr . com / services / api / flickr . groups . discuss . topics . getInfo . html " > API Documentation < / a > */ public Topic getTopicInfo ( String topicId ) throws FlickrException { } }
Map < String , Object > parameters = new HashMap < String , Object > ( ) ; parameters . put ( "method" , METHOD_TOPICS_GET_INFO ) ; parameters . put ( "topic_id" , topicId ) ; Response response = transportAPI . get ( transportAPI . getPath ( ) , parameters , apiKey , sharedSecret ) ; if ( response . isError ( ) ) { throw new FlickrException ( response . getErrorCode ( ) , response . getErrorMessage ( ) ) ; } Element topicElement = response . getPayload ( ) ; return parseTopic ( topicElement ) ;
public class NonMaxBlock_MT {

    /**
     * Detects local minimums and/or maximums in the provided intensity image.
     * Rows of blocks are processed concurrently; each worker borrows a Search
     * instance and per-thread corner storage from pools guarded by {@code lock}.
     *
     * @param intensityImage (Input) Feature intensity image.
     * @param localMin (Output) storage for found local minimums.
     * @param localMax (Output) storage for found local maximums.
     */
    @Override
    public void process(GrayF32 intensityImage, @Nullable QueueCorner localMin, @Nullable QueueCorner localMax) {
        if (localMin != null)
            localMin.reset();
        if (localMax != null)
            localMax.reset();

        // this defines the region that can be processed (inside the border)
        int endX = intensityImage.width - border;
        int endY = intensityImage.height - border;

        int step = configuration.radius + 1;

        search.initialize(configuration, intensityImage, localMin, localMax);

        // Compute number of y iterations, rounding up so the last partial
        // block row is still covered.
        int range = endY - border;
        int N = range / step;
        if (range > N * step)
            N += 1;

        BoofConcurrency.loopFor(0, N, iterY -> {
            // Local 'search' intentionally shadows the shared field; each
            // thread works on its own instance.
            NonMaxBlock.Search search;
            QueueCorner threadMin = null, threadMax = null;

            // get work space for this thread (pools are shared, so lock)
            synchronized (lock) {
                if (searches.isEmpty()) {
                    search = this.search.newInstance();
                } else {
                    search = searches.remove(searches.size() - 1);
                }
                if (search.isDetectMinimums()) {
                    threadMin = pop();
                }
                if (search.isDetectMaximums()) {
                    threadMax = pop();
                }
            }
            search.initialize(configuration, intensityImage, threadMin, threadMax);

            // search for local peaks along this block row
            int y = border + iterY * step;
            int y1 = y + step;
            if (y1 > endY)
                y1 = endY;

            for (int x = border; x < endX; x += step) {
                int x1 = x + step;
                if (x1 > endX)
                    x1 = endX;
                search.searchBlock(x, y, x1, y1);
            }

            // Save the results and recycle thread working space
            synchronized (lock) {
                saveResults(localMin, threadMin);
                saveResults(localMax, threadMax);
                searches.add(search);
                if (threadMin != null)
                    cornerLists.add(threadMin);
                if (threadMax != null)
                    cornerLists.add(threadMax);
            }
        });
    }
}
public class JsApiMessageImpl { /** * Clear all of the smoke - and - mirrors properties which are clearable . * Modifyable JMS Header fields ( e . g . JMSType ) are not affected . * The method has package level visibility as it is used by JsJmsMessageImpl * and JsSdoMessageImpl . */ final void clearSmokeAndMirrorsProperties ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "clearSmokeAndMirrorsProperties" ) ; /* JMSXAppId */ getHdr2 ( ) . setChoiceField ( JsHdr2Access . XAPPID , JsHdr2Access . IS_XAPPID_EMPTY ) ; /* JMSXUserID */ setUserid ( null ) ; /* JMSXDeliveryCount is not clearable */ /* JMS _ IBM _ ExceptionXxxxx are not clearable */ /* JMS _ IBM _ Feedback */ getApi ( ) . setChoiceField ( JsApiAccess . REPORTFEEDBACK , JsApiAccess . IS_REPORTFEEDBACK_UNSET ) ; getApi ( ) . setChoiceField ( JsApiAccess . REPORTFEEDBACKINT , JsApiAccess . IS_REPORTFEEDBACKINT_UNSET ) ; /* JMS _ IBM _ ReportXxxxxx */ getHdr2 ( ) . setChoiceField ( JsHdr2Access . REPORTEXPIRY , JsHdr2Access . IS_REPORTEXPIRY_UNSET ) ; getHdr2 ( ) . setChoiceField ( JsHdr2Access . REPORTCOA , JsHdr2Access . IS_REPORTCOA_UNSET ) ; getHdr2 ( ) . setChoiceField ( JsHdr2Access . REPORTCOD , JsHdr2Access . IS_REPORTCOD_UNSET ) ; getApi ( ) . setChoiceField ( JsApiAccess . REPORTEXCEPTION , JsApiAccess . IS_REPORTEXCEPTION_UNSET ) ; getApi ( ) . setChoiceField ( JsApiAccess . REPORTPAN , JsApiAccess . IS_REPORTPAN_UNSET ) ; getApi ( ) . setChoiceField ( JsApiAccess . REPORTNAN , JsApiAccess . IS_REPORTNAN_UNSET ) ; getApi ( ) . setChoiceField ( JsApiAccess . REPORTPASSMSGID , JsApiAccess . IS_REPORTPASSMSGID_UNSET ) ; getApi ( ) . setChoiceField ( JsApiAccess . REPORTPASSCORRELID , JsApiAccess . IS_REPORTPASSCORRELID_UNSET ) ; getApi ( ) . setChoiceField ( JsApiAccess . REPORTDISCARDMSG , JsApiAccess . IS_REPORTDISCARDMSG_UNSET ) ; /* JMS _ IBM _ ArmCorrelator & JMS _ TOG _ ARM _ Correlator */ setARMCorrelator ( null ) ; /* transportVersion */ getHdr2 ( ) . setChoiceField ( JsHdr2Access . TRANSPORTVERSION , JsHdr2Access . IS_TRANSPORTVERSION_EMPTY ) ; // JMS _ IBM _ Character _ Set and JMS _ IBM _ Encoding are not really smoke - and - mirrors // properties , but they do need to be cleared in the message itself too . 
d395685 setCcsid ( null ) ; setEncoding ( null ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "clearSmokeAndMirrorsProperties" ) ;
public class ViewUrl { /** * Get Resource Url for GetViewDocuments * @ param documentListName Name of content documentListName to delete * @ param filter A set of filter expressions representing the search parameters for a query . This parameter is optional . Refer to [ Sorting and Filtering ] ( . . / . . / . . / . . / Developer / api - guides / sorting - filtering . htm ) for a list of supported filters . * @ param includeInactive Include inactive content . * @ param pageSize When creating paged results from a query , this value indicates the zero - based offset in the complete result set where the returned entities begin . For example , with this parameter set to 25 , to get the 51st through the 75th items , set startIndex to 50. * @ param responseFields Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object . This parameter should only be used to retrieve data . Attempting to update data using this parameter may cause data loss . * @ param sortBy The element to sort the results by and the channel in which the results appear . Either ascending ( a - z ) or descending ( z - a ) channel . Optional . Refer to [ Sorting and Filtering ] ( . . / . . / . . / . . / Developer / api - guides / sorting - filtering . htm ) for more information . * @ param startIndex When creating paged results from a query , this value indicates the zero - based offset in the complete result set where the returned entities begin . For example , with pageSize set to 25 , to get the 51st through the 75th items , set this parameter to 50. * @ param viewName The name for a view . Views are used to render data in , such as document and entity lists . Each view includes a schema , format , name , ID , and associated data types to render . 
* @ return String Resource Url */ public static MozuUrl getViewDocumentsUrl ( String documentListName , String filter , Boolean includeInactive , Integer pageSize , String responseFields , String sortBy , Integer startIndex , String viewName ) { } }
UrlFormatter formatter = new UrlFormatter ( "/api/content/documentlists/{documentListName}/views/{viewName}/documents?filter={filter}&sortBy={sortBy}&pageSize={pageSize}&startIndex={startIndex}&includeInactive={includeInactive}&responseFields={responseFields}" ) ; formatter . formatUrl ( "documentListName" , documentListName ) ; formatter . formatUrl ( "filter" , filter ) ; formatter . formatUrl ( "includeInactive" , includeInactive ) ; formatter . formatUrl ( "pageSize" , pageSize ) ; formatter . formatUrl ( "responseFields" , responseFields ) ; formatter . formatUrl ( "sortBy" , sortBy ) ; formatter . formatUrl ( "startIndex" , startIndex ) ; formatter . formatUrl ( "viewName" , viewName ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class GoogleMapShapeConverter {

    /**
     * Add a shape to the map as markers.
     *
     * @param map google map
     * @param shape google map shape
     * @param markerOptions marker options
     * @param polylineMarkerOptions polyline marker options
     * @param polygonMarkerOptions polygon marker options
     * @param polygonMarkerHoleOptions polygon marker hole options
     * @param globalPolylineOptions global polyline options
     * @param globalPolygonOptions global polygon options
     * @return google map shape markers
     */
    public GoogleMapShapeMarkers addShapeToMapAsMarkers(GoogleMap map, GoogleMapShape shape, MarkerOptions markerOptions, MarkerOptions polylineMarkerOptions, MarkerOptions polygonMarkerOptions, MarkerOptions polygonMarkerHoleOptions, PolylineOptions globalPolylineOptions, PolygonOptions globalPolygonOptions) {
        GoogleMapShapeMarkers shapeMarkers = new GoogleMapShapeMarkers();
        GoogleMapShape addedShape = null;
        // Dispatch on the shape type; each branch adds the shape to the map
        // and records what was added as a new GoogleMapShape.
        switch (shape.getShapeType()) {
            case LAT_LNG:
                if (markerOptions == null) {
                    markerOptions = new MarkerOptions();
                }
                Marker latLngMarker = addLatLngToMap(map, (LatLng) shape.getShape(), markerOptions);
                shapeMarkers.add(latLngMarker);
                addedShape = new GoogleMapShape(shape.getGeometryType(), GoogleMapShapeType.MARKER, latLngMarker);
                break;
            case MARKER_OPTIONS:
                MarkerOptions shapeMarkerOptions = (MarkerOptions) shape.getShape();
                if (markerOptions != null) {
                    // Copy display settings from the caller-supplied options
                    // onto the shape's own options before adding it.
                    shapeMarkerOptions.icon(markerOptions.getIcon());
                    shapeMarkerOptions.anchor(markerOptions.getAnchorU(), markerOptions.getAnchorV());
                    shapeMarkerOptions.draggable(markerOptions.isDraggable());
                    shapeMarkerOptions.visible(markerOptions.isVisible());
                    shapeMarkerOptions.zIndex(markerOptions.getZIndex());
                }
                Marker markerOptionsMarker = addMarkerOptionsToMap(map, shapeMarkerOptions);
                shapeMarkers.add(markerOptionsMarker);
                addedShape = new GoogleMapShape(shape.getGeometryType(), GoogleMapShapeType.MARKER, markerOptionsMarker);
                break;
            case POLYLINE_OPTIONS:
                PolylineMarkers polylineMarkers = addPolylineToMapAsMarkers(map, (PolylineOptions) shape.getShape(), polylineMarkerOptions, globalPolylineOptions);
                shapeMarkers.add(polylineMarkers);
                addedShape = new GoogleMapShape(shape.getGeometryType(), GoogleMapShapeType.POLYLINE_MARKERS, polylineMarkers);
                break;
            case POLYGON_OPTIONS:
                PolygonMarkers polygonMarkers = addPolygonToMapAsMarkers(shapeMarkers, map, (PolygonOptions) shape.getShape(), polygonMarkerOptions, polygonMarkerHoleOptions, globalPolygonOptions);
                shapeMarkers.add(polygonMarkers);
                addedShape = new GoogleMapShape(shape.getGeometryType(), GoogleMapShapeType.POLYGON_MARKERS, polygonMarkers);
                break;
            case MULTI_LAT_LNG:
                MultiLatLng multiLatLng = (MultiLatLng) shape.getShape();
                if (markerOptions != null) {
                    multiLatLng.setMarkerOptions(markerOptions);
                }
                MultiMarker multiMarker = addLatLngsToMap(map, multiLatLng);
                shapeMarkers.add(multiMarker);
                addedShape = new GoogleMapShape(shape.getGeometryType(), GoogleMapShapeType.MULTI_MARKER, multiMarker);
                break;
            case MULTI_POLYLINE_OPTIONS:
                // NOTE(review): unlike most branches there is no
                // shapeMarkers.add(...) here; shapeMarkers is passed into the
                // helper, which presumably registers the markers itself —
                // confirm against the helper's implementation.
                MultiPolylineMarkers multiPolylineMarkers = addMultiPolylineToMapAsMarkers(shapeMarkers, map, (MultiPolylineOptions) shape.getShape(), polylineMarkerOptions, globalPolylineOptions);
                addedShape = new GoogleMapShape(shape.getGeometryType(), GoogleMapShapeType.MULTI_POLYLINE_MARKERS, multiPolylineMarkers);
                break;
            case MULTI_POLYGON_OPTIONS:
                MultiPolygonMarkers multiPolygonMarkers = addMultiPolygonToMapAsMarkers(shapeMarkers, map, (MultiPolygonOptions) shape.getShape(), polygonMarkerOptions, polygonMarkerHoleOptions, globalPolygonOptions);
                addedShape = new GoogleMapShape(shape.getGeometryType(), GoogleMapShapeType.MULTI_POLYGON_MARKERS, multiPolygonMarkers);
                break;
            case COLLECTION:
                // Recursively add each contained shape, aggregating both the
                // markers and the resulting shapes.
                List<GoogleMapShape> addedShapeList = new ArrayList<GoogleMapShape>();
                @SuppressWarnings("unchecked")
                List<GoogleMapShape> shapeList = (List<GoogleMapShape>) shape.getShape();
                for (GoogleMapShape shapeListItem : shapeList) {
                    GoogleMapShapeMarkers shapeListItemMarkers = addShapeToMapAsMarkers(map, shapeListItem, markerOptions, polylineMarkerOptions, polygonMarkerOptions, polygonMarkerHoleOptions, globalPolylineOptions, globalPolygonOptions);
                    shapeMarkers.add(shapeListItemMarkers);
                    addedShapeList.add(shapeListItemMarkers.getShape());
                }
                addedShape = new GoogleMapShape(shape.getGeometryType(), GoogleMapShapeType.COLLECTION, addedShapeList);
                break;
            default:
                throw new GeoPackageException("Unsupported Shape Type: " + shape.getShapeType());
        }
        shapeMarkers.setShape(addedShape);
        return shapeMarkers;
    }
}
public class HubVirtualNetworkConnectionsInner { /** * Retrieves the details of a HubVirtualNetworkConnection . * @ param resourceGroupName The resource group name of the VirtualHub . * @ param virtualHubName The name of the VirtualHub . * @ param connectionName The name of the vpn connection . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the HubVirtualNetworkConnectionInner object */ public Observable < HubVirtualNetworkConnectionInner > getAsync ( String resourceGroupName , String virtualHubName , String connectionName ) { } }
return getWithServiceResponseAsync ( resourceGroupName , virtualHubName , connectionName ) . map ( new Func1 < ServiceResponse < HubVirtualNetworkConnectionInner > , HubVirtualNetworkConnectionInner > ( ) { @ Override public HubVirtualNetworkConnectionInner call ( ServiceResponse < HubVirtualNetworkConnectionInner > response ) { return response . body ( ) ; } } ) ;
public class ScriptController {

    /**
     * Add a new script.
     *
     * @param model  Spring MVC model (not used by this handler)
     * @param name   name of the script to add (required)
     * @param script the script content (required)
     * @return the created {@link Script}, serialized as the response body
     * @throws Exception propagated from the script service
     */
    @RequestMapping(value = "/api/scripts", method = RequestMethod.POST)
    public @ResponseBody Script addScript(Model model, @RequestParam(required = true) String name, @RequestParam(required = true) String script) throws Exception {
        // Delegate creation/persistence to the singleton script service.
        return ScriptService.getInstance().addScript(name, script);
    }
}
public class PackageManagerUtils {

    /**
     * Checks if the device has a home screen.
     *
     * @param manager the package manager.
     * @return {@code true} if the device has a home screen.
     */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
    public static boolean hasHomeScreenFeature(PackageManager manager) {
        // FEATURE_HOME_SCREEN requires API 18+, hence the @TargetApi guard.
        return manager.hasSystemFeature(PackageManager.FEATURE_HOME_SCREEN);
    }
}
public class ConsumerMonitoring { /** * Method registerConsumerSetMonitor * Checks whether there are existing registrations on this topicspace and * discriminator combination . If there are then we will match the existing set * of subscriptions associated with it - we need only add the supplied callback to * the existing table in the ConsumerMonitorRegistrar . * If there are no existing registrations on this topicspace and discriminator * combination , then we have additional work to do . We must determine the set * of matching consumers and add a new entry to the ConsumerMonitorRegistrar . * Returns true if the potential set of consumers is currently greater than zero , * false if it is zero . * @ param topicSpace * @ param topicSpaceUuid * @ param discriminatorExpression * @ param combinedExpression * @ param callback * @ param isWildcarded * @ param wildcardStem * @ param mpm * @ return * @ throws SIDiscriminatorSyntaxException * @ throws SIErrorException */ public boolean registerConsumerSetMonitor ( ConnectionImpl connection , DestinationHandler topicSpace , SIBUuid12 topicSpaceUuid , String discriminatorExpression , String combinedExpression , ConsumerSetChangeCallback callback , boolean isWildcarded , String wildcardStem , MessageProcessorMatching mpm ) throws SIDiscriminatorSyntaxException , SIErrorException { } }
if ( tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "registerConsumerSetMonitor" , new Object [ ] { connection , topicSpace , topicSpaceUuid , discriminatorExpression , combinedExpression , callback , new Boolean ( isWildcarded ) , wildcardStem } ) ; boolean areConsumers = false ; // Test whether there are existing registrations on this topicspace . If so , // we will match the existing set of subscriptions associated with it . if ( _consumerMonitorRegistrar . checkExistingExpression ( combinedExpression , isWildcarded ) ) { // topic expression has been used by another registration , in this case // we need to add our callback entry to the registrations table only areConsumers = _consumerMonitorRegistrar . registerCallbackOnExistingExpression ( connection , combinedExpression , isWildcarded , callback ) ; } else { // A new topic expression . We ' ll have to find the set of consumers that map // to it . Set localConsumers = new HashSet ( ) ; // We ' ll use these sets where we have Set remoteConsumers = new HashSet ( ) ; // searched the matchspace // The style of processing depends on whether the discriminator in the // registration is wildcarded if ( ! isWildcarded ) { mpm . retrieveNonSelectorConsumers ( topicSpace , discriminatorExpression , localConsumers , remoteConsumers ) ; // Now we need to check whether there are any subscriptions with selectors // that " might " match this expression . _subscriptionRegistrar . findMatchingSelectorSubs ( combinedExpression , localConsumers ) ; } else { // If wildcarded use MatchSpace direct evaluation code and string matching _subscriptionRegistrar . findCandidateSubsForWildcardExpr ( combinedExpression , wildcardStem , localConsumers ) ; } // Now we perform the actual registration of our monitor areConsumers = registerCallbackOnNewExpression ( connection , combinedExpression , isWildcarded , callback , localConsumers , remoteConsumers ) ; } if ( tc . isEntryEnabled ( ) ) SibTr . 
exit ( tc , "registerConsumerSetMonitor" , new Boolean ( areConsumers ) ) ; return areConsumers ;
public class RowFormatter { /** * Appends rows */ protected static void appendRows ( final Iterator < ResultRow > it , final Appender ap , final Charset charset , final Formatting fmt , final Iterable < ColumnType > cols ) { } }
int i = 0 ; while ( it . hasNext ( ) ) { if ( i ++ > 0 ) { ap . append ( "\r\n" ) ; } // end of if ap . append ( " " ) ; ap . append ( fmt . rowStart ) ; appendValues ( it . next ( ) , ap , charset , fmt , cols . iterator ( ) , 0 ) ; ap . append ( fmt . rowEnd ) ; } // end of while ap . append ( ";" ) ;
public class Configuration {

    /**
     * Get the value at the given index from the result set.
     *
     * @param <T> type to return
     * @param rs result set
     * @param path path
     * @param i one based index in result set row
     * @param clazz type
     * @return value
     * @throws SQLException
     */
    @Nullable
    public <T> T get(ResultSet rs, @Nullable Path<?> path, int i, Class<T> clazz) throws SQLException {
        // Resolve the type mapping for (path, clazz) and let it read column i.
        return getType(path, clazz).getValue(rs, i);
    }
}
public class BaseWindowedBolt { /** * define a tumbling event time window * @ param size window size */ public BaseWindowedBolt < T > eventTimeWindow ( Time size ) { } }
long s = size . toMilliseconds ( ) ; ensurePositiveTime ( s ) ; setSizeAndSlide ( s , DEFAULT_SLIDE ) ; this . windowAssigner = TumblingEventTimeWindows . of ( s ) ; return this ;
public class ScanCollectionDefault {

    /**
     * Convenience method, calls {@link #getNextScanAtMsLevel(int, int)}
     * internally using the scan's own number and MS level.
     *
     * @param scan an existing scan from THIS ScanCollection
     */
    @Override
    public IScan getNextScanAtSameMsLevel(IScan scan) {
        return getNextScanAtMsLevel(scan.getNum(), scan.getMsLevel());
    }
}
public class Radial2Top {

    /**
     * <editor-fold defaultstate="collapsed" desc="Areas related">
     * Recomputes the pie-slice geometry of every configured area and, when
     * area drawing is enabled, paints the filled areas onto the given image.
     *
     * @param IMAGE image the areas are painted into; may be null, in which
     *              case only the geometry is recomputed
     */
    private void createAreas(final BufferedImage IMAGE) {
        if (!getAreas().isEmpty() && bImage != null) {
            final double ORIGIN_CORRECTION = 180.0;
            final double OUTER_RADIUS = bImage.getWidth() * 0.38f;
            final double RADIUS;
            if (isSectionsVisible()) {
                // Shrink the area radius to leave room for the section ring.
                RADIUS = isExpandedSectionsEnabled() ? OUTER_RADIUS - bImage.getWidth() * 0.12f : OUTER_RADIUS - bImage.getWidth() * 0.04f;
            } else {
                RADIUS = OUTER_RADIUS;
            }
            // Margin between the image edge and the area circle.
            final double FREE_AREA = bImage.getWidth() / 2.0 - RADIUS;

            for (Section area : getAreas()) {
                final double ANGLE_START;
                final double ANGLE_EXTEND;
                if (!isLogScale()) {
                    // Linear scale: value maps directly through the angle step.
                    ANGLE_START = ORIGIN_CORRECTION - (area.getStart() * Math.toDegrees(getAngleStep()));
                    ANGLE_EXTEND = -(area.getStop() - area.getStart()) * Math.toDegrees(getAngleStep());
                } else {
                    // Logarithmic scale: values pass through logOfBase first.
                    ANGLE_START = ORIGIN_CORRECTION - (UTIL.logOfBase(BASE, area.getStart()) * Math.toDegrees(getLogAngleStep()));
                    ANGLE_EXTEND = -UTIL.logOfBase(BASE, area.getStop() - area.getStart()) * Math.toDegrees(getLogAngleStep());
                }
                final Arc2D AREA = new Arc2D.Double(Arc2D.PIE);
                AREA.setFrame(bImage.getMinX() + FREE_AREA, bImage.getMinY() + FREE_AREA, 2 * RADIUS, 2 * RADIUS);
                AREA.setAngleStart(ANGLE_START);
                AREA.setAngleExtent(ANGLE_EXTEND);
                area.setFilledArea(AREA);
            }

            // Draw the area
            if (isAreasVisible() && IMAGE != null) {
                final Graphics2D G2 = IMAGE.createGraphics();
                G2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
                for (Section area : getAreas()) {
                    G2.setColor(isTransparentAreasEnabled() ? area.getTransparentColor() : area.getColor());
                    G2.fill(area.getFilledArea());
                    if (area3DEffectVisible) {
                        // Second fill pass overlays the 3D effect paint.
                        G2.setPaint(area3DEffect);
                        G2.fill(area.getFilledArea());
                    }
                }
                G2.dispose();
            }
        }
    }
}
public class CompilerEnvirons { /** * Returns a { @ code CompilerEnvirons } suitable for using Rhino * in an IDE environment . Most features are enabled by default . * The { @ link ErrorReporter } is set to an { @ link ErrorCollector } . */ public static CompilerEnvirons ideEnvirons ( ) { } }
CompilerEnvirons env = new CompilerEnvirons ( ) ; env . setRecoverFromErrors ( true ) ; env . setRecordingComments ( true ) ; env . setStrictMode ( true ) ; env . setWarnTrailingComma ( true ) ; env . setLanguageVersion ( 170 ) ; env . setReservedKeywordAsIdentifier ( true ) ; env . setIdeMode ( true ) ; env . setErrorReporter ( new ErrorCollector ( ) ) ; return env ;
public class MessageInfo { /** * Set up the key areas . */ public void setupKeys ( ) { } }
KeyAreaInfo keyArea = null ; keyArea = new KeyAreaInfo ( this , Constants . UNIQUE , ID_KEY ) ; keyArea . addKeyField ( ID , Constants . ASCENDING ) ; keyArea = new KeyAreaInfo ( this , Constants . NOT_UNIQUE , DESCRIPTION_KEY ) ; keyArea . addKeyField ( DESCRIPTION , Constants . ASCENDING ) ; keyArea = new KeyAreaInfo ( this , Constants . SECONDARY_KEY , CODE_KEY ) ; keyArea . addKeyField ( CODE , Constants . ASCENDING ) ; keyArea = new KeyAreaInfo ( this , Constants . NOT_UNIQUE , MESSAGE_INFO_TYPE_ID_KEY ) ; keyArea . addKeyField ( MESSAGE_INFO_TYPE_ID , Constants . ASCENDING ) ; keyArea . addKeyField ( CONTACT_TYPE_ID , Constants . ASCENDING ) ; keyArea . addKeyField ( REQUEST_TYPE_ID , Constants . ASCENDING ) ;
public class DatasetId { /** * Creates a dataset identity given project ' s and dataset ' s user - defined ids . */ public static DatasetId of ( String project , String dataset ) { } }
return new DatasetId ( checkNotNull ( project ) , checkNotNull ( dataset ) ) ;
public class EbeanDynamicEvolutions { /** * Initialise the Ebean servers . */ public void start ( ) { } }
config . serverConfigs ( ) . forEach ( ( key , serverConfig ) -> servers . put ( key , EbeanServerFactory . create ( serverConfig ) ) ) ;
// Trivial bean accessor; metadata for the connector framework is supplied via the annotation.
public class WebServiceConfiguration { /** * Accessor for the example property . Uses ConfigurationProperty annotation to provide property metadata to the * application . */ @ ConfigurationProperty ( displayMessageKey = "ENDPOINT_DISPLAY" , helpMessageKey = "ENDPOINT_HELP" , confidential = false , order = 1 , required = true ) public String getEndpoint ( ) { } }
// Returns the configured endpoint as-is (may be null until the framework sets it).
return endpoint ;
public class EncodingUtils { /** * Get the encoding for a passed in locale . * @ param locale The locale . * @ return The encoding . */ public static String getEncodingFromLocale ( Locale locale ) { } }
init ( ) ; if ( locale == cachedLocale ) { return cachedEncoding ; } String encoding = null ; /* ( String ) _ localeMap . get ( locale . toString ( ) ) ; if ( encoding = = null ) { encoding = ( String ) _ localeMap . get ( locale . getLanguage ( ) + " _ " + locale . getCountry ( ) ) ; if ( encoding = = null ) { encoding = ( String ) _ localeMap . get ( locale . getLanguage ( ) ) ; */ if ( encoding == null ) { // check the com . ibm . wsspi . http . EncodingUtils com . ibm . wsspi . http . EncodingUtils encodingUtils = com . ibm . ws . webcontainer . osgi . WebContainer . getEncodingUtils ( ) ; if ( encodingUtils != null ) { encoding = encodingUtils . getEncodingFromLocale ( locale ) ; } } cachedEncoding = encoding ; cachedLocale = locale ; return encoding ;
// Thin delegation: builds an NFS READ request for this file's handle at the given range.
public class NfsFileBase { /** * ( non - Javadoc ) * @ see com . emc . ecs . nfsclient . nfs . NfsFile # makeReadRequest ( long , int ) */ public NfsReadRequest makeReadRequest ( long offset , int size ) throws IOException { } }
// Forwards this file's handle plus the requested byte offset and size to the Nfs client.
return getNfs ( ) . makeReadRequest ( getFileHandle ( ) , offset , size ) ;
// Convenience overload: unwraps the Noise and RSN enums and fills in fixed defaults.
public class Predictor { /** * The SYSTEM command line includes parameters necessary to define the * system configuration . * @ param pwr PWR indicates the transmitter power in kilowatts . * @ param noise XNOISE indicates the expected man - made noise level . * @ param rsn RSN indicates the required signal - to - noise and is the ratio of * the hourly median signal power in the occupied bandwidth relative to the * hourly median noise in a 1 Hz bandwidth , which is necessary to provide * the type and quality of service required ( expressed in decibels ) . * @ return */ public Predictor system ( double pwr , Noise noise , RSN rsn ) { } }
// Delegates to the full system(...) overload. The literals 3, 90, 3 and 0.1 are the
// remaining SYSTEM parameters -- presumably minimum take-off angle, required reliability,
// and multipath tolerances; TODO confirm against the full overload's signature.
return system ( pwr , noise . getValue ( ) , 3 , 90 , rsn . getSnRatio ( ) , 3 , 0.1 ) ;
public class LevelsLinear { /** * Set RGB input range . * @ param inRGB Range . */ public void setInRGB ( IntRange inRGB ) { } }
this . inRed = inRGB ; this . inGreen = inRGB ; this . inBlue = inRGB ; CalculateMap ( inRGB , outRed , mapRed ) ; CalculateMap ( inRGB , outGreen , mapGreen ) ; CalculateMap ( inRGB , outBlue , mapBlue ) ;
public class WebUtils { /** * Resolves a view for the given view name and controller name * @ param request The request * @ param viewName The view name * @ param controllerName The controller name * @ param viewResolver The resolver * @ return A View or null * @ throws Exception Thrown if an error occurs */ public static View resolveView ( HttpServletRequest request , String viewName , String controllerName , ViewResolver viewResolver ) throws Exception { } }
GrailsWebRequest webRequest = GrailsWebRequest . lookup ( request ) ; Locale locale = webRequest != null ? webRequest . getLocale ( ) : Locale . getDefault ( ) ; return viewResolver . resolveViewName ( addViewPrefix ( viewName , controllerName ) , locale ) ;
public class CmsJspNavElement { /** * Returns the filename of the navigation element , i . e . * the name of the navigation resource without any path information . < p > * @ return the filename of the navigation element , i . e . * the name of the navigation resource without any path information */ public String getFileName ( ) { } }
if ( m_fileName == null ) { // use " lazy initializing " if ( ! m_sitePath . endsWith ( "/" ) ) { m_fileName = m_sitePath . substring ( m_sitePath . lastIndexOf ( "/" ) + 1 , m_sitePath . length ( ) ) ; } else { m_fileName = m_sitePath . substring ( m_sitePath . substring ( 0 , m_sitePath . length ( ) - 1 ) . lastIndexOf ( "/" ) + 1 , m_sitePath . length ( ) ) ; } } return m_fileName ;
// Resolves the element type of an association property for the GWT type oracle:
// arrays yield their component type; parameterized Iterables/Maps yield the LAST type
// argument (the map value type); raw types fall back to the generic declaration's
// type parameters. Returns null when the property has no resolvable element type.
public class BeanHelper { /** * get association type . * @ param ppropertyDescriptor property description * @ param puseField use field * @ return JClassType */ public JClassType getAssociationType ( final PropertyDescriptor ppropertyDescriptor , final boolean puseField ) { } }
// Intricate JType narrowing; kept verbatim. Note that a raw type with no type
// parameters would make typeArgs empty and the final index access fail -- presumably
// callers only pass Iterable/Map-shaped properties; TODO confirm.
final JType type = getElementType ( ppropertyDescriptor , puseField ) ; if ( type == null ) { return null ; } final JArrayType jarray = type . isArray ( ) ; if ( jarray != null ) { return jarray . getComponentType ( ) . isClassOrInterface ( ) ; } final JParameterizedType jptype = type . isParameterized ( ) ; JClassType [ ] typeArgs ; if ( jptype == null ) { final JRawType jrtype = type . isRawType ( ) ; typeArgs = jrtype . getGenericType ( ) . getTypeParameters ( ) ; } else { typeArgs = jptype . getTypeArgs ( ) ; } // it is either a Iterable or a Map use the last type arg . return typeArgs [ typeArgs . length - 1 ] . isClassOrInterface ( ) ;
// Thin delegation over the internal worker.
public class ObjectUtils { /** * Makes a shallow copy of the source object into the target one excluding properties not in * < code > propertyNames < / code > . * This method differs from { @ link ReflectionUtils # shallowCopyFieldState ( Object , Object ) } this doesn ' t require * source and target objects to share the same class hierarchy . * @ param source * the source object . * @ param target * the target object . * @ param propertyNames * the property names to be processed . Never mind if property names are invalid , in such a case are * ignored . */ public static void shallowCopy ( Object source , Object target , final String ... propertyNames ) { } }
// Boolean.FALSE selects the non-inverted mode of doShallowCopy -- presumably "copy only
// the listed properties" rather than "exclude them"; TODO confirm against doShallowCopy.
ObjectUtils . doShallowCopy ( source , target , Boolean . FALSE , propertyNames ) ;
// Root-mean-square rigidity of a consistent STN per [Hunsberger, 2002]:
// per-timepoint rigidity is 1 / (1 + upper - lower); unused timepoints keep the
// default 0.0 and therefore do not contribute to the sum. The final value is
// sqrt( (2 / (n * (n + 1))) * sum(rigidity_i^2) ).
public class APSPSolver { /** * This method computes the root mean square rigidity of a consistent STN ( the inverse concept of flexibility of a STN ) . * If the STN is completely rigid , then its rigidity is 1 . If the STN has no constraints , its rigidity is 0. * This measure in proposed in [ Luke Hunsberger , 2002 ] . * @ return Root mean square rigidity of a consistent STN . */ public double getRMSRigidity ( ) { } }
// Kept verbatim: the formula and the side effect of (re)populating the `rigidity`
// field are order-sensitive.
rigidity = new double [ this . getVariables ( ) . length ] ; for ( int i = 0 ; i < this . getVariables ( ) . length ; i ++ ) { if ( ( ( TimePoint ) this . getVariables ( ) [ i ] ) . isUsed ( ) ) { rigidity [ i ] = ( ( ( double ) 1 / ( ( double ) ( 1 + ( ( TimePoint ) this . getVariables ( ) [ i ] ) . getUpperBound ( ) - ( ( TimePoint ) this . getVariables ( ) [ i ] ) . getLowerBound ( ) ) ) ) ) ; // System . out . println ( i + " " + j + " - > " + distance [ this . getVariables ( ) [ i ] . getID ( ) ] [ this . getVariables ( ) [ j ] . getID ( ) ] ) ; // System . out . println ( i + " " + j + " - > " + rigidity [ i ] [ j ] ) ; } } double sigma = 0 ; for ( int i = 0 ; i < this . getVariables ( ) . length ; i ++ ) { if ( ( ( TimePoint ) this . getVariables ( ) [ i ] ) . isUsed ( ) ) { sigma += Math . pow ( rigidity [ i ] , 2.0 ) ; } } return Math . sqrt ( ( ( double ) sigma ) * ( ( double ) 2 / ( this . getVariables ( ) . length * ( this . getVariables ( ) . length + 1 ) ) ) ) ;
public class AttributeUtils { /** * Creates an { @ code AttributeValue } object of the given class and schema type . * After the object has been constructed , its setter methods should be called to setup the value object before adding * it to the attribute itself . * @ param < T > * the type * @ param schemaType * the schema type that should be assigned to the attribute value , i . e . , * { @ code xsi : type = " eidas : CurrentFamilyNameType " } * @ param clazz * the type of the attribute value * @ return the attribute value * @ see # createAttributeValueObject ( Class ) */ public static < T extends XMLObject > T createAttributeValueObject ( QName schemaType , Class < T > clazz ) { } }
XMLObjectBuilder < ? > builder = XMLObjectProviderRegistrySupport . getBuilderFactory ( ) . getBuilder ( schemaType ) ; XMLObject object = builder . buildObject ( AttributeValue . DEFAULT_ELEMENT_NAME , schemaType ) ; return clazz . cast ( object ) ;
public class XMLSitemapProvider { /** * Create URL sets from every provider and invoke the provided consumer with * it . * @ param aConsumer * The consumer to be invoked . Must be able to handle < code > null < / code > * and empty values . May itself not be < code > null < / code > . */ public static void forEachURLSet ( @ Nonnull final Consumer < ? super XMLSitemapURLSet > aConsumer ) { } }
ValueEnforcer . notNull ( aConsumer , "Consumer" ) ; for ( final IXMLSitemapProviderSPI aSPI : s_aProviders ) { final XMLSitemapURLSet aURLSet = aSPI . createURLSet ( ) ; aConsumer . accept ( aURLSet ) ; }
// State machine for persisting a portlet entity, guarded by a per-entity lock.
// Guests are never persisted. For a PersistentPortletEntityWrapper: update the DB row
// (recreating it if an optimistic-lock failure reveals it was deleted underneath us),
// or delete+recreate it as session-scoped when it no longer needs persistence. For a
// SessionPortletEntityImpl: promote it to a persistent entity when it now has
// preferences, otherwise clean up any stale persistent row and re-cache the session
// entity. Any other implementation is rejected. Kept verbatim: the lock/cache/DB
// ordering is intricate and order-sensitive.
public class PortletEntityRegistryImpl { /** * / * ( non - Javadoc ) * @ see org . apereo . portal . portlet . registry . IPortletEntityRegistry # storePortletEntity ( org . apereo . portal . portlet . om . IPortletEntity ) */ @ Override public void storePortletEntity ( HttpServletRequest request , final IPortletEntity portletEntity ) { } }
Validate . notNull ( portletEntity , "portletEntity can not be null" ) ; final IUserInstance userInstance = this . userInstanceManager . getUserInstance ( request ) ; final IPerson person = userInstance . getPerson ( ) ; if ( person . isGuest ( ) ) { // Never persist things for the guest user , just rely on in - memory storage return ; } final IPortletEntityId wrapperPortletEntityId = portletEntity . getPortletEntityId ( ) ; final Lock portletEntityLock = this . getPortletEntityLock ( request , wrapperPortletEntityId ) ; portletEntityLock . lock ( ) ; try { final boolean shouldBePersisted = this . shouldBePersisted ( portletEntity ) ; if ( portletEntity instanceof PersistentPortletEntityWrapper ) { // Unwrap the persistent entity final IPortletEntity persistentEntity = ( ( PersistentPortletEntityWrapper ) portletEntity ) . getPersistentEntity ( ) ; // Already persistent entity that still has prefs if ( shouldBePersisted ) { try { this . portletEntityDao . updatePortletEntity ( persistentEntity ) ; } catch ( HibernateOptimisticLockingFailureException e ) { // Check if this exception is from the entity being deleted from under us . final boolean exists = this . portletEntityDao . portletEntityExists ( persistentEntity . getPortletEntityId ( ) ) ; if ( ! exists ) { this . logger . warn ( "The persistent portlet has already been deleted: " + persistentEntity + ". The passed entity should be persistent so a new persistent entity will be created" ) ; this . deletePortletEntity ( request , portletEntity , true ) ; this . createPersistentEntity ( persistentEntity , wrapperPortletEntityId ) ; } else { throw e ; } } } // Already persistent entity that should not be , DELETE ! else { // Capture identifiers needed to recreate the entity as session persistent final IPortletDefinitionId portletDefinitionId = portletEntity . getPortletDefinitionId ( ) ; final String layoutNodeId = portletEntity . getLayoutNodeId ( ) ; final int userId = portletEntity . 
getUserId ( ) ; // Delete the persistent entity this . deletePortletEntity ( request , portletEntity , false ) ; // Create a new entity and stick it in the cache this . getOrCreatePortletEntity ( request , portletDefinitionId , layoutNodeId , userId ) ; } } else if ( portletEntity instanceof SessionPortletEntityImpl ) { // There are preferences on the interim entity , create an store it if ( shouldBePersisted ) { // Remove the session scoped entity from the request and session caches this . deletePortletEntity ( request , portletEntity , false ) ; final IPortletEntity persistentEntity = createPersistentEntity ( portletEntity , wrapperPortletEntityId ) ; if ( this . logger . isTraceEnabled ( ) ) { this . logger . trace ( "Session scoped entity " + wrapperPortletEntityId + " should now be persistent. Deleted it from session cache and created persistent portlet entity " + persistentEntity . getPortletEntityId ( ) ) ; } } // Session scoped entity that is still session scoped , else { // Look for a persistent entity and delete it final String channelSubscribeId = portletEntity . getLayoutNodeId ( ) ; final int userId = portletEntity . getUserId ( ) ; IPortletEntity existingPersistentEntity = this . portletEntityDao . getPortletEntity ( channelSubscribeId , userId ) ; if ( existingPersistentEntity != null ) { final IPortletEntityId consistentPortletEntityId = this . createConsistentPortletEntityId ( existingPersistentEntity ) ; existingPersistentEntity = new PersistentPortletEntityWrapper ( existingPersistentEntity , consistentPortletEntityId ) ; this . logger . warn ( "A persistent portlet entity already exists: " + existingPersistentEntity + ". The passed entity has no preferences so the persistent version will be deleted" ) ; this . deletePortletEntity ( request , existingPersistentEntity , false ) ; // Add to request cache final PortletEntityCache < IPortletEntity > portletEntityMap = this . getPortletEntityMap ( request ) ; portletEntityMap . 
storeIfAbsentEntity ( portletEntity ) ; // Add to session cache final PortletEntityCache < PortletEntityData > portletEntityDataMap = this . getPortletEntityDataMap ( request ) ; portletEntityDataMap . storeIfAbsentEntity ( ( ( SessionPortletEntityImpl ) portletEntity ) . getPortletEntityData ( ) ) ; } } } else { throw new IllegalArgumentException ( "Invalid portlet entity implementation passed: " + portletEntity . getClass ( ) ) ; } } finally { portletEntityLock . unlock ( ) ; }
// Translates a SARL/Xtend binary operation into Python source. Arithmetic,
// comparison, shift and compound-assignment operators pass through unchanged;
// logical && / || become "and" / "or"; identity === / !== become "is" / "is not".
// Unknown operators raise IllegalArgumentException. The whole expression is
// parenthesized to preserve precedence. Kept verbatim: the operator switch is
// exhaustive and easy to corrupt.
public class PyExpressionGenerator { /** * Generate the given object . * @ param operation the binary operation . * @ param it the target for the generated content . * @ param context the context . * @ return the operation . */ @ SuppressWarnings ( "checkstyle:cyclomaticcomplexity" ) protected XExpression _generate ( XBinaryOperation operation , IAppendable it , IExtraLanguageGeneratorContext context ) { } }
appendReturnIfExpectedReturnedExpression ( it , context ) ; final String operator = getOperatorSymbol ( operation ) ; if ( operator != null ) { it . append ( "(" ) ; // $ NON - NLS - 1 $ generate ( operation . getLeftOperand ( ) , it , context ) ; switch ( operator ) { case "-" : // $ NON - NLS - 1 $ case "+" : // $ NON - NLS - 1 $ case "*" : // $ NON - NLS - 1 $ case "/" : // $ NON - NLS - 1 $ case "%" : // $ NON - NLS - 1 $ case "-=" : // $ NON - NLS - 1 $ case "+=" : // $ NON - NLS - 1 $ case "*=" : // $ NON - NLS - 1 $ case "/=" : // $ NON - NLS - 1 $ case "%=" : // $ NON - NLS - 1 $ case "<" : // $ NON - NLS - 1 $ case ">" : // $ NON - NLS - 1 $ case "<=" : // $ NON - NLS - 1 $ case ">=" : // $ NON - NLS - 1 $ case "==" : // $ NON - NLS - 1 $ case "!=" : // $ NON - NLS - 1 $ case "<<" : // $ NON - NLS - 1 $ case ">>" : // $ NON - NLS - 1 $ it . append ( " " ) . append ( operator ) . append ( " " ) ; // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $ break ; case "&&" : // $ NON - NLS - 1 $ it . append ( " and " ) ; // $ NON - NLS - 1 $ break ; case "||" : // $ NON - NLS - 1 $ it . append ( " or " ) ; // $ NON - NLS - 1 $ break ; case "===" : // $ NON - NLS - 1 $ it . append ( " is " ) ; // $ NON - NLS - 1 $ break ; case "!==" : // $ NON - NLS - 1 $ it . append ( " is not " ) ; // $ NON - NLS - 1 $ break ; default : throw new IllegalArgumentException ( MessageFormat . format ( Messages . PyExpressionGenerator_0 , operator ) ) ; } generate ( operation . getRightOperand ( ) , it , context ) ; it . append ( ")" ) ; // $ NON - NLS - 1 $ } return operation ;
// Wipes the current Structr schema inside one transaction (relationships first, then
// nodes, methods, method parameters, properties and views) and rebuilds it from the
// given JsonSchema in "replace" mode. The transaction commits via tx.success() and
// rolls back automatically on exception via try-with-resources.
// NOTE(review): setOverridingSchemaTypesAllowed(true) is never reset to false
// afterwards -- TODO confirm that leaving it enabled is intentional.
public class StructrSchema { /** * Replaces the current Structr schema with the given new schema . This * method is the reverse of createFromDatabase above . * @ param app * @ param newSchema the new schema to replace the current Structr schema * @ throws FrameworkException * @ throws URISyntaxException */ public static void replaceDatabaseSchema ( final App app , final JsonSchema newSchema ) throws FrameworkException , URISyntaxException { } }
Services . getInstance ( ) . setOverridingSchemaTypesAllowed ( true ) ; try ( final Tx tx = app . tx ( ) ) { for ( final SchemaRelationshipNode schemaRelationship : app . nodeQuery ( SchemaRelationshipNode . class ) . getAsList ( ) ) { app . delete ( schemaRelationship ) ; } for ( final SchemaNode schemaNode : app . nodeQuery ( SchemaNode . class ) . getAsList ( ) ) { app . delete ( schemaNode ) ; } for ( final SchemaMethod schemaMethod : app . nodeQuery ( SchemaMethod . class ) . getAsList ( ) ) { app . delete ( schemaMethod ) ; } for ( final SchemaMethodParameter schemaMethodParameter : app . nodeQuery ( SchemaMethodParameter . class ) . getAsList ( ) ) { app . delete ( schemaMethodParameter ) ; } for ( final SchemaProperty schemaProperty : app . nodeQuery ( SchemaProperty . class ) . getAsList ( ) ) { app . delete ( schemaProperty ) ; } for ( final SchemaView schemaView : app . nodeQuery ( SchemaView . class ) . getAsList ( ) ) { app . delete ( schemaView ) ; } newSchema . createDatabaseSchema ( app , JsonSchema . ImportMode . replace ) ; tx . success ( ) ; }
public class UserController {
    /**
     * Changes the current visitor's password and forces a re-login.
     *
     * @param passwordModifyForm validated form carrying the new password
     * @param request current HTTP request, used to invalidate the login session
     * @return success payload telling the user to log in again
     */
    @ RequestMapping ( value = "/password" , method = RequestMethod . PUT ) @ ResponseBody public JsonObjectBase password ( @ Valid PasswordModifyForm passwordModifyForm , HttpServletRequest request ) { } }
// Validate the submitted form beyond bean validation.
authValidator . validatePasswordModify ( passwordModifyForm ) ;
// Update the stored password for the logged-in user.
Visitor visitor = ThreadContext . getSessionVisitor ( ) ;
userMgr . modifyPassword ( visitor . getLoginUserId ( ) , passwordModifyForm . getNew_password ( ) ) ;
// Force re-login so the old session cannot be reused with the stale credentials.
redisLogin . logout ( request ) ;
// Message text (user-facing, Chinese): "Password changed, please log in again".
return buildSuccess ( "修改成功,请重新登录" ) ;
// Produces the next tag for providers: pre-streaming tags are drained first; then the
// current MP4Frame is read from the data source at its recorded offset, prefixed with
// the 2-byte audio frame marker (hence the sampleSize + 2 allocation), and wrapped in a
// Tag carrying the millisecond timestamp and previous frame size. Access is serialized
// with a semaphore. Kept verbatim: buffer sizing, position math and lock handling are
// order-sensitive.
public class M4AReader { /** * Packages media data for return to providers . */ @ Override public ITag readTag ( ) { } }
// log . debug ( " Read tag " ) ; ITag tag = null ; try { lock . acquire ( ) ; // empty - out the pre - streaming tags first if ( ! firstTags . isEmpty ( ) ) { log . debug ( "Returning pre-tag" ) ; // Return first tags before media data return firstTags . removeFirst ( ) ; } // log . debug ( " Read tag - sample { } prevFrameSize { } audio : { } video : { } " , new Object [ ] { currentSample , prevFrameSize , audioCount , videoCount } ) ; // get the current frame MP4Frame frame = frames . get ( currentFrame ) ; log . debug ( "Playback {}" , frame ) ; int sampleSize = frame . getSize ( ) ; int time = ( int ) Math . round ( frame . getTime ( ) * 1000.0 ) ; // log . debug ( " Read tag - dst : { } base : { } time : { } " , new Object [ ] { frameTs , baseTs , time } ) ; long samplePos = frame . getOffset ( ) ; // log . debug ( " Read tag - samplePos { } " , samplePos ) ; // determine frame type and packet body padding byte type = frame . getType ( ) ; // create a byte buffer of the size of the sample ByteBuffer data = ByteBuffer . allocate ( sampleSize + 2 ) ; try { // log . debug ( " Writing audio prefix " ) ; data . put ( MP4Reader . PREFIX_AUDIO_FRAME ) ; // do we need to add the mdat offset to the sample position ? dataSource . position ( samplePos ) ; dataSource . read ( data ) ; } catch ( IOException e ) { log . error ( "Error on channel position / read" , e ) ; } // chunk the data IoBuffer payload = IoBuffer . wrap ( data . array ( ) ) ; // create the tag tag = new Tag ( type , time , payload . limit ( ) , payload , prevFrameSize ) ; // log . debug ( " Read tag - type : { } body size : { } " , ( type = = TYPE _ AUDIO ? " Audio " : " Video " ) , tag . getBodySize ( ) ) ; // increment the sample number currentFrame ++ ; // set the frame / tag size prevFrameSize = tag . getBodySize ( ) ; } catch ( InterruptedException e ) { log . warn ( "Exception acquiring lock" , e ) ; } finally { lock . release ( ) ; } // log . debug ( " Tag : { } " , tag ) ; return tag ;
// Read-lock-guarded membership check against the waiting-work collection.
public class ManualWorkQueue { /** * Whether the work is contained in the queue . * @ param context * Work to check . * @ return Whether the work was already waiting . */ public boolean isWaitingWork ( final T context ) { } }
// Acquire the read half of the queue's lock; the finally block guarantees release
// even if contains() throws.
read . lock ( ) ; try { return waitingWork . contains ( context ) ; } finally { read . unlock ( ) ; }
public class Unchecked { /** * Wrap a { @ link CheckedDoubleToLongFunction } in a { @ link DoubleToLongFunction } with a custom handler for checked exceptions . * Example : * < code > < pre > * DoubleStream . of ( 1.0 , 2.0 , 3.0 ) . mapToLong ( Unchecked . doubleToLongFunction ( * if ( d & lt ; 0.0) * throw new Exception ( " Only positive numbers allowed " ) ; * return ( long ) d ; * throw new IllegalStateException ( e ) ; * < / pre > < / code > */ public static DoubleToLongFunction doubleToLongFunction ( CheckedDoubleToLongFunction function , Consumer < Throwable > handler ) { } }
return t -> { try { return function . applyAsLong ( t ) ; } catch ( Throwable e ) { handler . accept ( e ) ; throw new IllegalStateException ( "Exception handler must throw a RuntimeException" , e ) ; } } ;
public class AddressArrayFactory { /** * Creates a fixed - length { @ link AddressArray } . * @ param homeDir - the home directory where the < code > indexes . dat < / code > is located . * @ param length - the length of { @ link AddressArray } . * @ param batchSize - the number of updates per update batch . * @ param numSyncBatches - the number of update batches required for updating the underlying indexes . * @ return an instance of { @ link AddressArray } . * @ throws Exception if an instance of { @ link AddressArray } cannot be created . */ public AddressArray createStaticAddressArray ( File homeDir , int length , int batchSize , int numSyncBatches ) throws Exception { } }
AddressArray addrArray ; if ( _indexesCached ) { addrArray = new StaticLongArray ( length , batchSize , numSyncBatches , homeDir ) ; } else { addrArray = new IOTypeLongArray ( Array . Type . STATIC , length , batchSize , numSyncBatches , homeDir ) ; } return addrArray ;
public class ImportSet { /** * Re - adds the most recently popped import to the set . If a null value was pushed , does * nothing . * @ throws IndexOutOfBoundsException if there is nothing to pop */ public void popIn ( ) { } }
String front = _pushed . remove ( _pushed . size ( ) - 1 ) ; if ( front != null ) { _imports . add ( front ) ; }
public class WildcardPattern { /** * Creates pattern with specified separator for directories . * This is used to match Java - classes , i . e . < code > org . foo . Bar < / code > against < code > org / * * < / code > . * < b > However usage of character other than " / " as a directory separator is misleading and should be avoided , * so method { @ link # create ( String ) } is preferred over this one . < / b > * Also note that no matter whether forward or backward slashes were used in the < code > antPattern < / code > * the returned pattern will use < code > directorySeparator < / code > . * Thus to match Windows - style path " dir \ file . ext " against pattern " dir / file . ext " normalization should be performed . */ public static WildcardPattern create ( String pattern , String directorySeparator ) { } }
String key = pattern + directorySeparator ; return CACHE . computeIfAbsent ( key , k -> new WildcardPattern ( pattern , directorySeparator ) ) ;
public class Dates { /** * Creates a copy on a Date . * @ param d The Date to copy , can be null * @ return null when the input Date was null , a copy of the Date otherwise */ public static Date copy ( final Date d ) { } }
if ( d == null ) { return null ; } else { return new Date ( d . getTime ( ) ) ; }
// Routes an event: into the open compound transaction when one is active, otherwise to
// the distributed object manager; if neither exists the event is dropped with a log entry.
public class DObject { /** * Posts the specified event either to our dobject manager or to the compound event for which * we are currently transacting . */ public void postEvent ( DEvent event ) { } }
// _tevent is the in-flight compound (transaction) event; _omgr the object manager.
// An object with neither is unmanaged, so the event cannot be delivered.
if ( _tevent != null ) { _tevent . postEvent ( event ) ; } else if ( _omgr != null ) { _omgr . postEvent ( event ) ; } else { log . info ( "Dropping event for non- or no longer managed object" , "oid" , getOid ( ) , "class" , getClass ( ) . getName ( ) , "event" , event ) ; }
// AutoRest-generated service call: validates the required endpoint, mode and url,
// wraps the url in an ImageUrl body, substitutes {Endpoint} in the parameterized host,
// and maps the raw Retrofit response into a typed ServiceResponseWithHeaders (the
// recognition result URL arrives in the Operation-Location response header).
// Kept verbatim: generated code should not be hand-edited.
public class ComputerVisionImpl { /** * Recognize Text operation . When you use the Recognize Text interface , the response contains a field called ' Operation - Location ' . The ' Operation - Location ' field contains the URL that you must use for your Get Recognize Text Operation Result operation . * @ param mode Type of text to recognize . Possible values include : ' Handwritten ' , ' Printed ' * @ param url Publicly reachable URL of an image * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceResponseWithHeaders } object if successful . */ public Observable < ServiceResponseWithHeaders < Void , RecognizeTextHeaders > > recognizeTextWithServiceResponseAsync ( String url , TextRecognitionMode mode ) { } }
if ( this . client . endpoint ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.endpoint() is required and cannot be null." ) ; } if ( mode == null ) { throw new IllegalArgumentException ( "Parameter mode is required and cannot be null." ) ; } if ( url == null ) { throw new IllegalArgumentException ( "Parameter url is required and cannot be null." ) ; } ImageUrl imageUrl = new ImageUrl ( ) ; imageUrl . withUrl ( url ) ; String parameterizedHost = Joiner . on ( ", " ) . join ( "{Endpoint}" , this . client . endpoint ( ) ) ; return service . recognizeText ( mode , this . client . acceptLanguage ( ) , imageUrl , parameterizedHost , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponseWithHeaders < Void , RecognizeTextHeaders > > > ( ) { @ Override public Observable < ServiceResponseWithHeaders < Void , RecognizeTextHeaders > > call ( Response < ResponseBody > response ) { try { ServiceResponseWithHeaders < Void , RecognizeTextHeaders > clientResponse = recognizeTextDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
// Copies the requested window into an ArrayList instead of using AbstractList#subList,
// because even the range check there would force a full iteration of this Iterable.
public class RunList { /** * { @ link AbstractList # subList ( int , int ) } isn ' t very efficient on our { @ link Iterable } based implementation . * In fact the range check alone would require us to iterate all the elements , * so we ' d be better off just copying into ArrayList . */ @ Override public List < R > subList ( int fromIndex , int toIndex ) { } }
// Skip fromIndex runs, then copy exactly (toIndex - fromIndex) elements.
// NOTE(review): there is no explicit range check -- a toIndex past the end surfaces as
// NoSuchElementException from itr.next() rather than the IndexOutOfBoundsException the
// List contract documents; confirm callers always pass in-range indices.
List < R > r = new ArrayList < > ( ) ; Iterator < R > itr = iterator ( ) ; hudson . util . Iterators . skip ( itr , fromIndex ) ; for ( int i = toIndex - fromIndex ; i > 0 ; i -- ) { r . add ( itr . next ( ) ) ; } return r ;
// Emits the connection interface for the code generator: when the MCF definition
// declares methods on the connection, one signature per configured method is written;
// otherwise a default "callMe()" stub is emitted. A "close()" method is always appended.
public class ConnInterfaceCodeGen { /** * Output class * @ param def definition * @ param out Writer * @ throws IOException ioException */ @ Override public void writeClassBody ( Definition def , Writer out ) throws IOException { } }
// Note: getNumOfMcf() selects which MCF definition of the connector this interface
// belongs to; the generated members are interface method signatures only.
int indent = 1 ; out . write ( "public interface " + getClassName ( def ) ) ; writeLeftCurlyBracket ( out , 0 ) ; if ( def . getMcfDefs ( ) . get ( getNumOfMcf ( ) ) . isDefineMethodInConnection ( ) ) { if ( def . getMcfDefs ( ) . get ( getNumOfMcf ( ) ) . getMethods ( ) . size ( ) > 0 ) { for ( MethodForConnection method : def . getMcfDefs ( ) . get ( getNumOfMcf ( ) ) . getMethods ( ) ) { writeMethodSignature ( out , indent , method ) ; out . write ( ";\n" ) ; } } } else { writeSimpleMethodSignature ( out , indent , " * Call me" , "public void callMe();" ) ; } writeEol ( out ) ; writeSimpleMethodSignature ( out , indent , " * Close" , "public void close();" ) ; writeRightCurlyBracket ( out , 0 ) ;
// Lazily builds the HandleParameters combo box with its three options and a custom
// renderer. Not synchronized -- presumably only ever called on the Swing EDT, as is
// conventional for such getters; TODO confirm.
public class OptionsSpiderPanel { /** * This method initializes the combobox for HandleParameters option . * @ return the combo handle parameters */ @ SuppressWarnings ( "unchecked" ) private JComboBox < HandleParametersOption > getComboHandleParameters ( ) { } }
if ( handleParameters == null ) { handleParameters = new JComboBox < > ( new HandleParametersOption [ ] { HandleParametersOption . USE_ALL , HandleParametersOption . IGNORE_VALUE , HandleParametersOption . IGNORE_COMPLETELY } ) ; handleParameters . setRenderer ( new HandleParametersOptionRenderer ( ) ) ; } return handleParameters ;
// One-shot bootstrap of the embedded Jetty admin server (guarded by alreadyInited):
// configures the admin registry, builds a Guice injector (child of the app injector or
// a standalone lifecycle injector), then wires four Jetty contexts -- root redirect,
// template resources, AJAX data resources, and a classpath ResourceHandler -- applies
// configured filters, adds extra connectors, starts the server and records its port.
// NOTE(review): the catch block logs and swallows every exception (the declared
// "throws Exception" is never used), and alreadyInited stays true after a failed
// start, so init cannot be retried -- confirm this is intentional.
// Kept verbatim: the handler/filter wiring order is significant.
public class AdminResourcesContainer { /** * Starts the container and hence the embedded jetty server . * @ throws Exception if there is an issue while starting the server */ @ PostConstruct public void init ( ) throws Exception { } }
try { if ( alreadyInited . compareAndSet ( false , true ) ) { initAdminContainerConfigIfNeeded ( ) ; initAdminRegistryIfNeeded ( ) ; if ( ! adminContainerConfig . shouldEnable ( ) ) { return ; } if ( adminContainerConfig . shouldScanClassPathForPluginDiscovery ( ) ) { adminPageRegistry . registerAdminPagesWithClasspathScan ( ) ; } Injector adminResourceInjector ; if ( shouldShareResourcesWithParentInjector ( ) ) { adminResourceInjector = appInjector . createChildInjector ( buildAdminPluginsGuiceModules ( ) ) ; } else { adminResourceInjector = LifecycleInjector . builder ( ) . inStage ( Stage . DEVELOPMENT ) . usingBasePackages ( "com.netflix.explorers" ) . withModules ( buildAdminPluginsGuiceModules ( ) ) . build ( ) . createInjector ( ) ; adminResourceInjector . getInstance ( LifecycleManager . class ) . start ( ) ; } server = new Server ( adminContainerConfig . listenPort ( ) ) ; final List < Filter > additionaFilters = adminContainerConfig . additionalFilters ( ) ; // redirect filter based on configurable RedirectRules final Context rootHandler = new Context ( ) ; rootHandler . setContextPath ( "/" ) ; rootHandler . addFilter ( new FilterHolder ( adminResourceInjector . getInstance ( RedirectFilter . class ) ) , "/*" , Handler . DEFAULT ) ; rootHandler . addServlet ( new ServletHolder ( new DefaultServlet ( ) ) , "/*" ) ; // admin page template resources AdminResourcesFilter arfTemplatesResources = adminResourceInjector . getInstance ( AdminResourcesFilter . class ) ; Map < String , Object > props = new HashMap < > ( adminContainerConfig . getJerseyConfigProperties ( ) ) ; props . put ( PackagesResourceConfig . PROPERTY_PACKAGES , adminContainerConfig . jerseyViewableResourcePkgList ( ) + ";" + Objects . toString ( props . get ( PackagesResourceConfig . PROPERTY_PACKAGES ) ) ) ; arfTemplatesResources . setProperties ( props ) ; logger . info ( "Admin templates context : {}" , adminContainerConfig . 
templateResourceContext ( ) ) ; final Context adminTemplatesResHandler = new Context ( ) ; adminTemplatesResHandler . setContextPath ( adminContainerConfig . templateResourceContext ( ) ) ; adminTemplatesResHandler . setSessionHandler ( new SessionHandler ( ) ) ; adminTemplatesResHandler . addFilter ( LoggingFilter . class , "/*" , Handler . DEFAULT ) ; adminTemplatesResHandler . addFilter ( new FilterHolder ( adminResourceInjector . getInstance ( RedirectFilter . class ) ) , "/*" , Handler . DEFAULT ) ; applyAdditionalFilters ( adminTemplatesResHandler , additionaFilters ) ; adminTemplatesResHandler . addFilter ( new FilterHolder ( arfTemplatesResources ) , "/*" , Handler . DEFAULT ) ; adminTemplatesResHandler . addServlet ( new ServletHolder ( new DefaultServlet ( ) ) , "/*" ) ; // admin page data resources AdminResourcesFilter arfDataResources = adminResourceInjector . getInstance ( AdminResourcesFilter . class ) ; props = new HashMap < > ( adminContainerConfig . getJerseyConfigProperties ( ) ) ; props . put ( PackagesResourceConfig . PROPERTY_PACKAGES , appendCoreJerseyPackages ( adminPageRegistry . buildJerseyResourcePkgListForAdminPages ( ) ) + ";" + Objects . toString ( props . get ( PackagesResourceConfig . PROPERTY_PACKAGES ) ) ) ; arfDataResources . setProperties ( props ) ; logger . info ( "Admin resources context : {}" , adminContainerConfig . ajaxDataResourceContext ( ) ) ; final Context adminDataResHandler = new Context ( ) ; adminDataResHandler . setContextPath ( adminContainerConfig . ajaxDataResourceContext ( ) ) ; adminDataResHandler . addFilter ( LoggingFilter . class , "/*" , Handler . DEFAULT ) ; adminDataResHandler . addFilter ( new FilterHolder ( adminResourceInjector . getInstance ( RedirectFilter . class ) ) , "/*" , Handler . DEFAULT ) ; applyAdditionalFilters ( adminDataResHandler , additionaFilters ) ; adminDataResHandler . addFilter ( new FilterHolder ( arfDataResources ) , "/*" , Handler . DEFAULT ) ; adminDataResHandler . 
addServlet ( new ServletHolder ( new DefaultServlet ( ) ) , "/*" ) ; QueuedThreadPool threadPool = new QueuedThreadPool ( ) ; threadPool . setDaemon ( true ) ; server . setThreadPool ( threadPool ) ; ResourceHandler resource_handler = new ResourceHandler ( ) { @ Override public Resource getResource ( String path ) throws MalformedURLException { Resource resource = Resource . newClassPathResource ( path ) ; if ( resource == null || ! resource . exists ( ) ) { resource = Resource . newClassPathResource ( "META-INF/resources" + path ) ; } if ( resource != null && resource . isDirectory ( ) ) { return null ; } return resource ; } } ; resource_handler . setResourceBase ( "/" ) ; HandlerCollection handlers = new HandlerCollection ( ) ; handlers . setHandlers ( new Handler [ ] { resource_handler , adminTemplatesResHandler , adminDataResHandler , rootHandler } ) ; server . setHandler ( handlers ) ; for ( Connector connector : adminContainerConfig . additionalConnectors ( ) ) { server . addConnector ( connector ) ; } server . start ( ) ; final Connector connector = server . getConnectors ( ) [ 0 ] ; serverPort = connector . getLocalPort ( ) ; logger . info ( "jetty started on port {}" , serverPort ) ; } } catch ( Exception e ) { logger . error ( "Exception in building AdminResourcesContainer " , e ) ; }
public class Config { /** * < p > Initializes the configuration . < / p > < p > Looks for a properties ( programName . conf ) in the classpath , the user * home directory , and in the run directory to load < / p > < p > The command line arguments are parsed and added to the * configuration . < / p > * @ param programName the program name * @ param args the command line arguments * @ param otherPackages Other packages whose configs we should load * @ return Non config / option parameters from command line */ public static String [ ] initialize ( String programName , String [ ] args , String ... otherPackages ) { } }
// Convenience overload: delegates to the full initializer with a freshly
// constructed default CommandLineParser.
return initialize ( programName , args , new CommandLineParser ( ) , otherPackages ) ;
public class RandomUtil { /** * 获得一个随机的字符串 * @ param baseString 随机字符选取的样本 * @ param length 字符串的长度 * @ return 随机字符串 */ public static String randomString ( String baseString , int length ) { } }
// Builds a random string by sampling characters uniformly from baseString.
// A non-positive requested length is treated as 1.
final int count = Math.max(1, length);
final int sampleSize = baseString.length();
final StringBuilder result = new StringBuilder(count);
for (int i = 0; i < count; i++) {
    result.append(baseString.charAt(getRandom().nextInt(sampleSize)));
}
return result.toString();
public class LoadBalancer { /** * factory methods */ public static LoadBalancer getDefault ( String urls ) { } }
// Splits the (possibly null) URL string on URL_SEP; a null input yields an
// empty endpoint list. Delegates to the list-based factory method.
final String[] endpoints = (urls == null) ? new String[0] : urls.split(URL_SEP);
return getDefault(Arrays.asList(endpoints));
public class PropertyClient { /** * Does a property description scan of the properties in all interface objects . * @ param allProperties < code > true < / code > to scan all property descriptions in the interface * objects , < code > false < / code > to only scan the object type descriptions , i . e . , * { @ link PropertyAccess . PID # OBJECT _ TYPE } * @ param consumer invoked on every property read during the scan , taking a property * { @ link Description } argument * @ throws KNXException on adapter errors while querying the descriptions * @ throws InterruptedException on thread interrupt */ public void scanProperties ( final boolean allProperties , final Consumer < Description > consumer ) throws KNXException , InterruptedException { } }
for ( int index = 0 ; scan ( index , allProperties , consumer ) > 0 ; ++ index ) ;
public class Expressions { /** * Creates an IsEqual expression from the given expression and constant . * @ param left The left expression . * @ param constant The constant to compare to ( must be a String ) . * @ throws IllegalArgumentException If constant is not a String . * @ return A new IsEqual binary expression . */ public static StringIsEqual isEqual ( StringExpression left , Object constant ) { } }
// Only String constants are supported; anything else is rejected up front.
if (constant instanceof String) {
    return new StringIsEqual(left, constant((String) constant));
}
throw new IllegalArgumentException("constant is not a String");
public class ArrayDeserializer { /** * Reads the array . */ public Object readList ( AbstractHessianInput in , int length ) throws IOException { } }
if ( length >= 0 ) { Object [ ] data = createArray ( length ) ; in . addRef ( data ) ; if ( _componentType != null ) { for ( int i = 0 ; i < data . length ; i ++ ) data [ i ] = in . readObject ( _componentType ) ; } else { for ( int i = 0 ; i < data . length ; i ++ ) data [ i ] = in . readObject ( ) ; } in . readListEnd ( ) ; return data ; } else { ArrayList list = new ArrayList ( ) ; in . addRef ( list ) ; if ( _componentType != null ) { while ( ! in . isEnd ( ) ) list . add ( in . readObject ( _componentType ) ) ; } else { while ( ! in . isEnd ( ) ) list . add ( in . readObject ( ) ) ; } in . readListEnd ( ) ; Object [ ] data = createArray ( list . size ( ) ) ; for ( int i = 0 ; i < data . length ; i ++ ) data [ i ] = list . get ( i ) ; return data ; }
public class JobTargetExecutionsInner { /** * Lists the target executions of a job step execution . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server . * @ param jobAgentName The name of the job agent . * @ param jobName The name of the job to get . * @ param jobExecutionId The id of the job execution * @ param stepName The name of the step . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; JobExecutionInner & gt ; object */ public Observable < Page < JobExecutionInner > > listByStepAsync ( final String resourceGroupName , final String serverName , final String jobAgentName , final String jobName , final UUID jobExecutionId , final String stepName ) { } }
// Delegates to the ServiceResponse-returning variant and unwraps each
// emission to its body, dropping the HTTP response envelope.
return listByStepWithServiceResponseAsync ( resourceGroupName , serverName , jobAgentName , jobName , jobExecutionId , stepName ) . map ( new Func1 < ServiceResponse < Page < JobExecutionInner > > , Page < JobExecutionInner > > ( ) { @ Override public Page < JobExecutionInner > call ( ServiceResponse < Page < JobExecutionInner > > response ) { return response . body ( ) ; } } ) ;
public class TypeConverter { /** * Convert the passed source value to short * @ param aSrcValue * The source value . May be < code > null < / code > . * @ param nDefault * The default value to be returned if an error occurs during type * conversion . * @ return The converted value . * @ throws RuntimeException * If the converter itself throws an exception * @ see TypeConverterProviderBestMatch */ public static short convertToShort ( @ Nullable final Object aSrcValue , final short nDefault ) { } }
// Attempt the conversion; a null result means "could not convert" and the
// caller-supplied default is returned instead.
final Short converted = convert(aSrcValue, Short.class, null);
if (converted != null) {
    return converted.shortValue();
}
return nDefault;
public class CsvReader { /** * Sort the rows by comparator * @ param comparator the comparator used for sorting */ public void sortRows ( Comparator < List < String > > comparator ) { } }
// Sorting an empty table is a no-op; skip it explicitly.
if (!data.isEmpty()) {
    Collections.sort(data, comparator);
}
public class WsByteBufferUtils { /** * Convert a buffer to a string using the input starting position and ending * limit . * @ param buff * @ param position * @ param limit * @ return String */ public static final String asString ( WsByteBuffer buff , int position , int limit ) { } }
// Extract the byte range first; a null extraction propagates as a null string.
final byte[] bytes = asByteArray(buff, position, limit);
if (bytes == null) {
    return null;
}
return new String(bytes);
public class IdemixUtils { /** * append appends a boolean array to an existing byte array * @ param data the data to which we want to append * @ param toAppend the data to be appended * @ return a new byte [ ] of data + toAppend */ static byte [ ] append ( byte [ ] data , boolean [ ] toAppend ) { } }
// Encode each boolean as a single byte (true -> 1, false -> 0), then reuse
// the byte-array append overload.
final byte[] encoded = new byte[toAppend.length];
int pos = 0;
for (boolean bit : toAppend) {
    encoded[pos++] = bit ? (byte) 1 : (byte) 0;
}
return append(data, encoded);
public class PortProber { /** * Returns a port that is within a probable free range . < p / > Based on the ports in * http : / / en . wikipedia . org / wiki / Ephemeral _ ports , this method stays away from all well - known * ephemeral port ranges , since they can arbitrarily race with the operating system in * allocations . Due to the port - greedy nature of selenium this happens fairly frequently . * Staying within the known safe range increases the probability tests will run green quite * significantly . * @ return a random port number */ private static int createAcceptablePort ( ) { } }
// Picks a random port from whichever side of the OS ephemeral range has more
// room: above the highest ephemeral port (up to 65535) or below the lowest
// one (down to the start of the user port range). Synchronized on the shared
// Random so concurrent callers draw independent values.
synchronized ( random ) {
    final int FIRST_PORT ;
    final int LAST_PORT ;
    int freeAbove = HIGHEST_PORT - ephemeralRangeDetector . getHighestEphemeralPort ( ) ;
    int freeBelow = max ( 0 , ephemeralRangeDetector . getLowestEphemeralPort ( ) - START_OF_USER_PORTS ) ;
    if ( freeAbove > freeBelow ) {
        FIRST_PORT = ephemeralRangeDetector . getHighestEphemeralPort ( ) ;
        LAST_PORT = 65535 ;
    } else {
        FIRST_PORT = 1024 ;
        LAST_PORT = ephemeralRangeDetector . getLowestEphemeralPort ( ) ;
    }
    // Degenerate range of one port: no randomness needed.
    if ( FIRST_PORT == LAST_PORT ) { return FIRST_PORT ; }
    // Inverted range means the ephemeral detector left no usable ports.
    if ( FIRST_PORT > LAST_PORT ) { throw new UnsupportedOperationException ( "Could not find ephemeral port to use" ) ; }
    // Map a raw random int uniformly-ish onto [FIRST_PORT, LAST_PORT].
    final int randomInt = random . nextInt ( ) ;
    final int portWithoutOffset = Math . abs ( randomInt % ( LAST_PORT - FIRST_PORT + 1 ) ) ;
    return portWithoutOffset + FIRST_PORT ;
}
public class PluginRepositoryManager { /** * Return a plugin repository identified by the plugin interface type . * @ param pluginType * The plugin interface . * @ param < P > * The plugin type * @ return The { @ link com . buschmais . xo . impl . plugin . PluginRepository } . */ public < P extends PluginRepository < ? , ? > > P getPluginManager ( Class < ? > pluginType ) { } }
// Unchecked cast: the map associates each plugin interface type with its
// repository. NOTE(review): an unregistered pluginType returns null here
// rather than failing fast — confirm callers handle that.
return ( P ) pluginRepositories . get ( pluginType ) ;
public class DecimalFormat { /** * Returns true if a grouping separator belongs at the given position , based on whether * grouping is in use and the values of the primary and secondary grouping interval . * @ param pos the number of integer digits to the right of the current position . Zero * indicates the position after the rightmost integer digit . * @ return true if a grouping character belongs at the current position . */ private boolean isGroupingPosition ( int pos ) { } }
// No separator when grouping is off, at/after the rightmost digit, or when
// no primary grouping interval is configured.
if (!isGroupingUsed() || pos <= 0 || groupingSize <= 0) {
    return false;
}
// Beyond the primary group, the secondary interval (if any) takes over.
if (groupingSize2 > 0 && pos > groupingSize) {
    return ((pos - groupingSize) % groupingSize2) == 0;
}
return (pos % groupingSize) == 0;
public class CheckClassAdapter { /** * Checks a class signature . * @ param signature * a string containing the signature that must be checked . */ public static void checkClassSignature ( final String signature ) { } }
// ClassSignature : // FormalTypeParameters ? ClassTypeSignature ClassTypeSignature * int pos = 0 ; if ( getChar ( signature , 0 ) == '<' ) { pos = checkFormalTypeParameters ( signature , pos ) ; } pos = checkClassTypeSignature ( signature , pos ) ; while ( getChar ( signature , pos ) == 'L' ) { pos = checkClassTypeSignature ( signature , pos ) ; } if ( pos != signature . length ( ) ) { throw new IllegalArgumentException ( signature + ": error at index " + pos ) ; }
public class XMLFormatter { /** * Append the time and date in ISO 8601 format */ private void appendISO8601 ( StringBuilder sb , long millis ) { } }
// Format as yyyy-MM-ddTHH:mm:ss using the default-zone calendar; a2() zero
// pads each two-digit field. Calendar.MONTH is zero-based, hence the +1.
final GregorianCalendar calendar = new GregorianCalendar();
calendar.setTimeInMillis(millis);
sb.append(calendar.get(Calendar.YEAR)).append('-');
a2(sb, calendar.get(Calendar.MONTH) + 1);
sb.append('-');
a2(sb, calendar.get(Calendar.DAY_OF_MONTH));
sb.append('T');
a2(sb, calendar.get(Calendar.HOUR_OF_DAY));
sb.append(':');
a2(sb, calendar.get(Calendar.MINUTE));
sb.append(':');
a2(sb, calendar.get(Calendar.SECOND));
public class DescribeNetworkInterfacePermissionsRequest { /** * One or more network interface permission IDs . * @ param networkInterfacePermissionIds * One or more network interface permission IDs . */ public void setNetworkInterfacePermissionIds ( java . util . Collection < String > networkInterfacePermissionIds ) { } }
if ( networkInterfacePermissionIds == null ) { this . networkInterfacePermissionIds = null ; return ; } this . networkInterfacePermissionIds = new com . amazonaws . internal . SdkInternalList < String > ( networkInterfacePermissionIds ) ;
public class FileEntry { /** * { @ inheritDoc } */ @ Override @ FFDCIgnore ( PrivilegedActionException . class ) public URL getResource ( ) { } }
// Converts the backing file to a URL inside a privileged block so callers
// without file permissions can still resolve the resource.
// Fix: parameterize PrivilegedExceptionAction<URL> — the original used the
// raw type and needed an unchecked (URL) cast of doPrivileged's result.
try {
    return AccessController.doPrivileged(new PrivilegedExceptionAction<URL>() {
        @Override
        public URL run() throws MalformedURLException {
            return file.toURI().toURL();
        }
    });
} catch (PrivilegedActionException e) {
    // Malformed URL (the only checked failure) maps to "no resource".
    return null;
}
public class MetadataService { /** * Updates a { @ link ProjectRole } for the specified { @ code member } in the specified { @ code projectName } . */ public CompletableFuture < Revision > updateMemberRole ( Author author , String projectName , User member , ProjectRole projectRole ) { } }
// Fail fast on null arguments before building the patch.
requireNonNull ( author , "author" ) ;
requireNonNull ( projectName , "projectName" ) ;
requireNonNull ( member , "member" ) ;
requireNonNull ( projectRole , "projectRole" ) ;
// JSON patch replacing the member's "role" field inside metadata.json.
// NOTE(review): the pointer is "/members" + encodeSegment(member.id()) +
// "/role" — this assumes encodeSegment returns a leading-'/' segment;
// verify against its implementation.
final Change < JsonNode > change = Change . ofJsonPatch ( METADATA_JSON , new ReplaceOperation ( JsonPointer . compile ( "/members" + encodeSegment ( member . id ( ) ) + "/role" ) , Jackson . valueToTree ( projectRole ) ) . toJsonNode ( ) ) ;
final String commitSummary = "Updates the role of the member '" + member . id ( ) + "' as '" + projectRole + "' for the project " + projectName ;
// Push the patch to the project's dogma repository as a single commit.
return metadataRepo . push ( projectName , Project . REPO_DOGMA , author , commitSummary , change ) ;
public class SequentialMultiInstanceBehavior { /** * Called when the wrapped { @ link ActivityBehavior } calls the { @ link AbstractBpmnActivityBehavior # leave ( ActivityExecution ) } method . Handles the completion of one instance , and executes the logic for * the sequential behavior . */ public void leave ( DelegateExecution childExecution ) { } }
// Bookkeeping: read loop state from the multi-instance root, advance the
// per-child index and the completed count, and persist both.
DelegateExecution multiInstanceRootExecution = getMultiInstanceRootExecution ( childExecution ) ;
int nrOfInstances = getLoopVariable ( multiInstanceRootExecution , NUMBER_OF_INSTANCES ) ;
int loopCounter = getLoopVariable ( childExecution , getCollectionElementIndexVariable ( ) ) + 1 ;
int nrOfCompletedInstances = getLoopVariable ( multiInstanceRootExecution , NUMBER_OF_COMPLETED_INSTANCES ) + 1 ;
int nrOfActiveInstances = getLoopVariable ( multiInstanceRootExecution , NUMBER_OF_ACTIVE_INSTANCES ) ;
setLoopVariable ( multiInstanceRootExecution , NUMBER_OF_COMPLETED_INSTANCES , nrOfCompletedInstances ) ;
setLoopVariable ( childExecution , getCollectionElementIndexVariable ( ) , loopCounter ) ;
logLoopDetails ( childExecution , "instance completed" , loopCounter , nrOfCompletedInstances , nrOfActiveInstances , nrOfInstances ) ;
// Close out the finished instance in history and fire its end listeners.
Context . getCommandContext ( ) . getHistoryManager ( ) . recordActivityEnd ( ( ExecutionEntity ) childExecution , null ) ;
callActivityEndListeners ( childExecution ) ;
// executeCompensationBoundaryEvents ( execution . getCurrentFlowElement ( ) , execution ) ;
if ( loopCounter >= nrOfInstances || completionConditionSatisfied ( multiInstanceRootExecution ) ) {
    // All instances done (or completion condition met): dissolve the
    // multi-instance root back into a plain execution and leave it.
    removeLocalLoopVariable ( childExecution , getCollectionElementIndexVariable ( ) ) ;
    multiInstanceRootExecution . setMultiInstanceRoot ( false ) ;
    multiInstanceRootExecution . setScope ( false ) ;
    multiInstanceRootExecution . setCurrentFlowElement ( childExecution . getCurrentFlowElement ( ) ) ;
    Context . getCommandContext ( ) . getExecutionEntityManager ( ) . deleteChildExecutions ( ( ExecutionEntity ) multiInstanceRootExecution , "MI_END" , false ) ;
    super . leave ( multiInstanceRootExecution ) ;
} else {
    try {
        if ( childExecution . getCurrentFlowElement ( ) instanceof SubProcess ) {
            // Sub-processes need a fresh scoped child execution for the
            // next sequential iteration.
            ExecutionEntityManager executionEntityManager = Context . getCommandContext ( ) . getExecutionEntityManager ( ) ;
            ExecutionEntity executionToContinue = executionEntityManager . createChildExecution ( ( ExecutionEntity ) multiInstanceRootExecution ) ;
            executionToContinue . setCurrentFlowElement ( childExecution . getCurrentFlowElement ( ) ) ;
            executionToContinue . setScope ( true ) ;
            setLoopVariable ( executionToContinue , getCollectionElementIndexVariable ( ) , loopCounter ) ;
            executeOriginalBehavior ( executionToContinue , loopCounter ) ;
        } else {
            // Plain activities reuse the current execution.
            executeOriginalBehavior ( childExecution , loopCounter ) ;
        }
    } catch ( BpmnError error ) {
        // re - throw business fault so that it can be caught by an Error
        // Intermediate Event or Error Event Sub - Process in the process
        throw error ;
    } catch ( Exception e ) {
        throw new ActivitiException ( "Could not execute inner activity behavior of multi instance behavior" , e ) ;
    }
}
public class AutoMap { /** * Use given relative xpath to resolve the value . * @ param path * relative xpath * @ return value in DOM tree . null if no value is present . */ @ SuppressWarnings ( "unchecked" ) @ Override public XBAutoList < T > getList ( final CharSequence path ) { } }
// Delegates to the typed overload, using the target component type recorded
// in the invocation context; the cast is covered by @SuppressWarnings above.
return ( XBAutoList < T > ) getList ( path , invocationContext . getTargetComponentType ( ) ) ;
public class YarnContainerManager { /** * NM Callback : NM accepts the starting container request . * @ param containerId ID of a new container being started . * @ param stringByteBufferMap a Map between the auxiliary service names and their outputs . Not used . */ @ Override public void onContainerStarted ( final ContainerId containerId , final Map < String , ByteBuffer > stringByteBufferMap ) { } }
// Unknown containers are ignored; for tracked ones, kick off an async
// status query against the container's node manager.
final Optional<Container> container = this.containers.getOptional(containerId.toString());
if (!container.isPresent()) {
    return;
}
this.nodeManager.getContainerStatusAsync(containerId, container.get().getNodeId());
public class FreemarkerConfigurationBuilder < P > { /** * Sets the charset used for decoding byte sequences to character sequences * when reading template files in a locale for which no explicit encoding * was specified via { @ link Configuration # setEncoding ( Locale , String ) } . Note * that by default there is no locale specified for any locale , so the * default encoding is always in effect . * Defaults to the default system encoding , which can change from one server * to another , so < b > you should always set this setting < / b > . If you don ' t * know what charset your should chose , { @ code " UTF - 8 " } is usually a good * choice . * Note that individual templates may specify their own charset by starting * with < tt > & lt ; # ftl encoding = " . . . " & gt ; < / tt > * You can specify a direct value . For example : * < pre > * . defaultEncoding ( " UTF - 8 " ) ; * < / pre > * You can also specify one or several property keys . For example : * < pre > * . defaultEncoding ( " $ { custom . property . high - priority } " , " $ { custom . property . low - priority } " ) ; * < / pre > * The properties are not immediately evaluated . The evaluation will be done * when the { @ link # build ( ) } method is called . * If you provide several property keys , evaluation will be done on the * first key and if the property exists ( see { @ link EnvironmentBuilder } ) , * its value is used . If the first property doesn ' t exist in properties , * then it tries with the second one and so on . * @ param encodings * one value , or one or several property keys * @ return this instance for fluent chaining */ public FreemarkerConfigurationBuilder < P > defaultEncoding ( String ... encodings ) { } }
// Queue every provided value/property key, preserving order; evaluation is
// deferred until build(). Returns this for fluent chaining.
for (String encoding : encodings) {
    this.defaultEncodings.add(encoding);
}
return this;
public class PrivateKeyReader { /** * Reads the given byte array with the default RSA algorithm and returns the { @ link PrivateKey } * object . * @ param privateKeyBytes * the byte array that contains the private key bytes * @ return the { @ link PrivateKey } object * @ throws NoSuchAlgorithmException * is thrown if instantiation of the cypher object fails . * @ throws InvalidKeySpecException * is thrown if generation of the SecretKey object fails . * @ throws NoSuchProviderException * is thrown if the specified provider is not registered in the security provider * list . */ public static PrivateKey readPrivateKey ( final byte [ ] privateKeyBytes ) throws NoSuchAlgorithmException , InvalidKeySpecException , NoSuchProviderException { } }
// Convenience overload: delegates with the RSA algorithm name as default.
return readPrivateKey ( privateKeyBytes , KeyPairGeneratorAlgorithm . RSA . getAlgorithm ( ) ) ;
public class CharacterCLA { /** * { @ inheritDoc } */ @ Override public Character [ ] getValueAsCharacterArray ( ) throws ParseException { } }
// Materialize every parsed value into a boxed Character array.
final int count = size();
final Character[] values = new Character[count];
for (int index = 0; index < count; index++) {
    values[index] = getValue(index);
}
return values;
public class CmsEventManager { /** * Initialize this event manager with all events from the given base event manager . < p > * @ param base the base event manager to initialize this event manager with */ protected void initialize ( CmsEventManager base ) { } }
// Copy the base manager's listener map into a fresh HashMap so later
// registrations on this manager do not mutate the base manager's state.
// NOTE(review): the inner listener lists are shared, not deep-copied —
// confirm that is acceptable for callers.
m_eventListeners = new HashMap < Integer , List < I_CmsEventListener > > ( base . getEventListeners ( ) ) ;
public class JTMConfigurationProvider { /** * The setTMS method call is used to alert the JTMConfigurationProvider to the presence of a * TransactionManagerService . * @ param tms */ public void setTMS ( TransactionManagerService tms ) { } }
// Trace the injection when debug is enabled, then record the service
// reference for later use by this provider.
if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "setTMS " + tms ) ;
tmsRef = tms ;
public class DockerClient { /** * Push the latest image to the repository . * @ param name The name , e . g . " alexec / busybox " or just " busybox " if you want to default . Not null . */ public ClientResponse push ( final String name ) throws DockerException { } }
// The image name is mandatory.
if (name == null) {
    throw new IllegalArgumentException("name is null");
}
try {
    // Resolve registry credentials, then POST to the push endpoint with the
    // X-Registry-Auth header the Docker remote API expects.
    final String auth = registryAuth();
    return client.resource(restEndpointUrl + "/images/" + name(name) + "/push")
            .header("X-Registry-Auth", auth)
            .accept(MediaType.APPLICATION_JSON)
            .post(ClientResponse.class);
} catch (UniformInterfaceException e) {
    // Wrap HTTP-level failures in the client's own exception type.
    throw new DockerException(e);
}
public class AmazonWorkspacesClient { /** * Reboots the specified WorkSpaces . * You cannot reboot a WorkSpace unless its state is < code > AVAILABLE < / code > or < code > UNHEALTHY < / code > . * This operation is asynchronous and returns before the WorkSpaces have rebooted . * @ param rebootWorkspacesRequest * @ return Result of the RebootWorkspaces operation returned by the service . * @ sample AmazonWorkspaces . RebootWorkspaces * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / workspaces - 2015-04-08 / RebootWorkspaces " target = " _ top " > AWS * API Documentation < / a > */ @ Override public RebootWorkspacesResult rebootWorkspaces ( RebootWorkspacesRequest request ) { } }
// Apply the standard pre-execution hooks (handlers/interceptors), then
// dispatch the actual service call.
request = beforeClientExecution ( request ) ;
return executeRebootWorkspaces ( request ) ;
public class AtomCache { /** * Returns a { @ link Structure } corresponding to the CATH identifier supplied in { @ code structureName } , using the specified { @ link CathDatabase } . */ public Structure getStructureForCathDomain ( StructureName structureName , CathDatabase cathInstall ) throws IOException , StructureException { } }
CathDomain cathDomain = cathInstall . getDomainByCathId ( structureName . getIdentifier ( ) ) ; Structure s = getStructureForPdbId ( cathDomain . getIdentifier ( ) ) ; Structure n = cathDomain . reduce ( s ) ; // add the ligands of the chain . . . Chain newChain = n . getPolyChainByPDB ( structureName . getChainId ( ) ) ; List < Chain > origChains = s . getNonPolyChainsByPDB ( structureName . getChainId ( ) ) ; for ( Chain origChain : origChains ) { List < Group > ligands = origChain . getAtomGroups ( ) ; for ( Group g : ligands ) { if ( ! newChain . getAtomGroups ( ) . contains ( g ) ) { newChain . addGroup ( g ) ; } } } return n ;
public class RestController { /** * Creates a new entity from a html form post . */ @ Transactional @ PostMapping ( value = "/{entityTypeId}" , headers = "Content-Type=application/x-www-form-urlencoded" ) public void createFromFormPost ( @ PathVariable ( "entityTypeId" ) String entityTypeId , HttpServletRequest request , HttpServletResponse response ) { } }
// Flatten each form parameter's values into one comma-joined string and
// keep only non-blank entries, then delegate to the shared create logic.
Map<String, Object> parameters = new HashMap<>();
for (Map.Entry<String, String[]> entry : request.getParameterMap().entrySet()) {
    String[] rawValues = entry.getValue();
    String joined = rawValues != null ? StringUtils.join(rawValues, ',') : null;
    if (StringUtils.isNotBlank(joined)) {
        parameters.put(entry.getKey(), joined);
    }
}
createInternal(entityTypeId, parameters, response);
public class Client { /** * Declares a shovel . * @ param vhost virtual host where to declare the shovel * @ param info Shovel info . */ public void declareShovel ( String vhost , ShovelInfo info ) { } }
// Publish properties may be absent (null) but, when present, must carry at
// least one entry — an empty map is rejected.
final Map<String, Object> publishProperties = info.getDetails().getPublishProperties();
if (publishProperties != null && publishProperties.isEmpty()) {
    throw new IllegalArgumentException("Shovel publish properties must be a non-empty map or null");
}
// PUT the shovel definition under its vhost- and name-scoped parameter path.
final URI shovelUri = uriWithPath(
        "./parameters/shovel/" + encodePathSegment(vhost) + "/" + encodePathSegment(info.getName()));
this.rt.put(shovelUri, info);
public class AppServiceEnvironmentsInner { /** * Move an App Service Environment to a different VNET . * Move an App Service Environment to a different VNET . * @ param resourceGroupName Name of the resource group to which the resource belongs . * @ param name Name of the App Service Environment . * @ param vnetInfo Details for the new virtual network . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws DefaultErrorResponseException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the PagedList & lt ; SiteInner & gt ; object if successful . */ public PagedList < SiteInner > changeVnet ( final String resourceGroupName , final String name , final VirtualNetworkProfile vnetInfo ) { } }
// Blocks on the first page, then wraps it in a PagedList whose nextPage()
// lazily (and also blocking) fetches subsequent pages via the next-link.
ServiceResponse < Page < SiteInner > > response = changeVnetSinglePageAsync ( resourceGroupName , name , vnetInfo ) . toBlocking ( ) . single ( ) ;
return new PagedList < SiteInner > ( response . body ( ) ) {
    @ Override public Page < SiteInner > nextPage ( String nextPageLink ) {
        return changeVnetNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) . body ( ) ;
    }
} ;
public class CompensationEventListener { /** * When signaling compensation , you can do that in 1 of 2 ways : * 1 . signalEvent ( " Compensation " , < node - with - compensation - handler - id > ) * This is specific compensation , that only possibly triggers the compensation handler * attached to the node referred to by the < node - with - compensation - handler - id > . * 2 . signalEvent ( " Compensation " , " implicit : " + < node - container - containing - compensation - scope - id > ) * This is implicit or general compensation , in which you trigger all visible compensation handlers * ( in the proper order , etc . ) in the ( sub - ) process referred to by * the < node - container - containing - compensation - scope - id > . */ public void signalEvent ( String compensationType , Object activityRefStr ) { } }
// Only String activity references are accepted (null included in the check,
// since null is not an instance of String).
// Fix: the original built the error message as
//   "... type " + activityRefStr == null ? "null" : activityRefStr.getClass()...
// where '+' binds tighter than '?:', so the condition was always false and a
// null activityRefStr caused an NPE while constructing the message.
if (!(activityRefStr instanceof String)) {
    String refType = (activityRefStr == null) ? "null" : activityRefStr.getClass().getSimpleName();
    throw new WorkflowRuntimeException(null, getProcessInstance(),
            "Compensation can only be triggered with String events, not an event of type " + refType);
}
// 1. parse the activity ref (is it general or specific compensation?)
String activityRef = (String) activityRefStr;
String toCompensateNodeId = activityRef;
boolean generalCompensation = false;
if (activityRef.startsWith(IMPLICIT_COMPENSATION_PREFIX)) {
    toCompensateNodeId = activityRef.substring(IMPLICIT_COMPENSATION_PREFIX.length());
    generalCompensation = true;
}
org.jbpm.process.core.Process process = (org.jbpm.process.core.Process) instance.getProcess();
// 2. for specific compensation: find the node that will be compensated
//    for general compensation: find the compensation scope container that
//    contains all the visible compensation handlers
Node toCompensateNode = null;
ContextContainer compensationScopeContainer = null;
if (generalCompensation) {
    if (toCompensateNodeId.equals(instance.getProcessId())) {
        compensationScopeContainer = process;
    } else {
        compensationScopeContainer = (ContextContainer) findNode(toCompensateNodeId);
    }
} else {
    toCompensateNode = findNode(toCompensateNodeId);
}
// 3. If the node exists,
//    a. find the node container for which the compensation handler is visible
//    b. create the compensation scope instance
//    c. handle the exception (which also cleans up the generated node instances)
if (toCompensateNode != null || compensationScopeContainer != null) {
    CompensationScope compensationScope = null;
    if (compensationScopeContainer != null) {
        compensationScope = (CompensationScope) compensationScopeContainer.getDefaultContext(COMPENSATION_SCOPE);
    } else {
        compensationScope = (CompensationScope) ((NodeImpl) toCompensateNode)
                .resolveContext(COMPENSATION_SCOPE, toCompensateNodeId);
    }
    assert compensationScope != null : "Compensation scope for node [" + toCompensateNodeId + "] could not be found!";
    CompensationScopeInstance scopeInstance;
    if (compensationScope.getContextContainerId().equals(process.getId())) {
        // process level compensation
        scopeInstance = (CompensationScopeInstance) instance.getContextInstance(compensationScope);
    } else {
        // nested compensation
        Stack<NodeInstance> generatedInstances;
        if (toCompensateNode == null) {
            // logic is the same if it's specific or general
            generatedInstances = createNodeInstanceContainers((Node) compensationScopeContainer, true);
        } else {
            generatedInstances = createNodeInstanceContainers(toCompensateNode, false);
        }
        NodeInstance nodeInstanceContainer = generatedInstances.peek();
        scopeInstance = ((CompensationScopeInstance)
                ((ContextInstanceContainer) nodeInstanceContainer).getContextInstance(compensationScope));
        scopeInstance.addCompensationInstances(generatedInstances);
    }
    scopeInstance.handleException(activityRef, null);
}
public class ElectronImpactNBEReaction { /** * set the active center for this molecule . The active center * will be heteroatoms which contain at least one group of * lone pair electrons . * @ param reactant The molecule to set the activity * @ throws CDKException */ private void setActiveCenters ( IAtomContainer reactant ) throws CDKException { } }
Iterator < IAtom > atoms = reactant . atoms ( ) . iterator ( ) ; while ( atoms . hasNext ( ) ) { IAtom atom = atoms . next ( ) ; if ( reactant . getConnectedLonePairsCount ( atom ) > 0 && reactant . getConnectedSingleElectronsCount ( atom ) == 0 ) atom . setFlag ( CDKConstants . REACTIVE_CENTER , true ) ; }
public class CommerceNotificationAttachmentPersistenceImpl { /** * Returns the commerce notification attachment with the primary key or returns < code > null < / code > if it could not be found . * @ param primaryKey the primary key of the commerce notification attachment * @ return the commerce notification attachment , or < code > null < / code > if a commerce notification attachment with the primary key could not be found */ @ Override public CommerceNotificationAttachment fetchByPrimaryKey ( Serializable primaryKey ) { } }
Serializable serializable = entityCache . getResult ( CommerceNotificationAttachmentModelImpl . ENTITY_CACHE_ENABLED , CommerceNotificationAttachmentImpl . class , primaryKey ) ; if ( serializable == nullModel ) { return null ; } CommerceNotificationAttachment commerceNotificationAttachment = ( CommerceNotificationAttachment ) serializable ; if ( commerceNotificationAttachment == null ) { Session session = null ; try { session = openSession ( ) ; commerceNotificationAttachment = ( CommerceNotificationAttachment ) session . get ( CommerceNotificationAttachmentImpl . class , primaryKey ) ; if ( commerceNotificationAttachment != null ) { cacheResult ( commerceNotificationAttachment ) ; } else { entityCache . putResult ( CommerceNotificationAttachmentModelImpl . ENTITY_CACHE_ENABLED , CommerceNotificationAttachmentImpl . class , primaryKey , nullModel ) ; } } catch ( Exception e ) { entityCache . removeResult ( CommerceNotificationAttachmentModelImpl . ENTITY_CACHE_ENABLED , CommerceNotificationAttachmentImpl . class , primaryKey ) ; throw processException ( e ) ; } finally { closeSession ( session ) ; } } return commerceNotificationAttachment ;
public class BatchDeleteScheduledActionResult { /** * The names of the scheduled actions that could not be deleted , including an error message . * @ param failedScheduledActions * The names of the scheduled actions that could not be deleted , including an error message . */ public void setFailedScheduledActions ( java . util . Collection < FailedScheduledUpdateGroupActionRequest > failedScheduledActions ) { } }
if ( failedScheduledActions == null ) { this . failedScheduledActions = null ; return ; } this . failedScheduledActions = new com . amazonaws . internal . SdkInternalList < FailedScheduledUpdateGroupActionRequest > ( failedScheduledActions ) ;
public class DatabaseManager { /** * Looks up database of a given type and path in the registry . Returns * null if there is none . */ public static synchronized Database lookupDatabaseObject ( String type , String path ) { } }
// A VoltDB extension to work around ENG - 6044 /* disabled 14 lines . . . Object key = path ; HashMap databaseMap ; if ( type = = DatabaseURL . S _ FILE ) { databaseMap = fileDatabaseMap ; key = filePathToKey ( path ) ; } else if ( type = = DatabaseURL . S _ RES ) { databaseMap = resDatabaseMap ; } else if ( type = = DatabaseURL . S _ MEM ) { databaseMap = memDatabaseMap ; } else { throw ( Error . runtimeError ( ErrorCode . U _ S0500 , " DatabaseManager . lookupDatabaseObject ( ) " ) ) ; . . . disabled 14 lines */ assert ( type == DatabaseURL . S_MEM ) ; java . util . HashMap < String , Database > databaseMap = memDatabaseMap ; String key = path ; // End of VoltDB extension return ( Database ) databaseMap . get ( key ) ;
public class Record { /** * Creates a new record , with the given parameters . * @ param name The owner name of the record . * @ param type The record ' s type . * @ param dclass The record ' s class . * @ param ttl The record ' s time to live . * @ param length The length of the record ' s data . * @ param data The rdata of the record , in uncompressed DNS wire format . Only * the first length bytes are used . */ public static Record newRecord ( Name name , int type , int dclass , long ttl , int length , byte [ ] data ) { } }
if ( ! name . isAbsolute ( ) ) throw new RelativeNameException ( name ) ; Type . check ( type ) ; DClass . check ( dclass ) ; TTL . check ( ttl ) ; DNSInput in ; if ( data != null ) in = new DNSInput ( data ) ; else in = null ; try { return newRecord ( name , type , dclass , ttl , length , in ) ; } catch ( IOException e ) { return null ; }