signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class WorkflowRuleTarget { /** * Converts a JSON workflow configuration to a workflow configuration object . * @ param json JSON for workflow rule target * @ return a workflow rule target object * @ throws IOException if unable to create object */ public static WorkflowRuleTarget fromJson ( String json ) throws IOException { } }
ObjectMapper mapper = new ObjectMapper ( ) ; return mapper . readValue ( json , WorkflowRuleTarget . class ) ;
public class Tuple3 { /** * Concatenate a tuple to this tuple . */ public final < T4 , T5 > Tuple5 < T1 , T2 , T3 , T4 , T5 > concat ( Tuple2 < T4 , T5 > tuple ) { } }
return new Tuple5 < > ( v1 , v2 , v3 , tuple . v1 , tuple . v2 ) ;
public class BeanInfoManager { /** * Returns the BeanInfoManager for the specified class */ public static BeanInfoManager getBeanInfoManager ( Class pClass ) { } }
BeanInfoManager ret = ( BeanInfoManager ) mBeanInfoManagerByClass . get ( pClass ) ; if ( ret == null ) { ret = createBeanInfoManager ( pClass ) ; } return ret ;
public class SessionProtocolNegotiationCache {

    /**
     * Clears the cache.
     *
     * <p>Acquires a read lock first so the common "already empty" case never
     * contends for the write lock; upgrades to a write lock only when there is
     * something to remove. Logging happens after the lock is released to keep
     * the critical section short.
     */
    public static void clear() {
        int size;
        long stamp = lock.readLock();
        try {
            size = cache.size();
            if (size == 0) {
                // Nothing to clear; the read stamp is released in the finally block.
                return;
            }
            // Upgrade to a write lock; the returned stamp replaces the read stamp
            // and is the one that must be passed to unlock().
            stamp = convertToWriteLock(stamp);
            // Re-read the size under the write lock — NOTE(review): this suggests
            // convertToWriteLock may release and re-acquire (non-atomic upgrade);
            // confirm its semantics.
            size = cache.size();
            cache.clear();
        } finally {
            lock.unlock(stamp);
        }
        // Log outside the lock; singular/plural wording chosen by entry count.
        if (size != 0 && logger.isDebugEnabled()) {
            if (size != 1) {
                logger.debug("Cleared: {} entries", size);
            } else {
                logger.debug("Cleared: 1 entry");
            }
        }
    }
}
public class MemcachedClient {
  /**
   * Asynchronously gets a bunch of objects from the cache.
   *
   * <p>Implementation outline (code below is unchanged): each key is validated and
   * paired with its transcoder, then routed to a node — the primary if active,
   * otherwise the first active node in the locator sequence, falling back to the
   * primary. Keys are grouped per node into "chunks"; one get operation is issued
   * per chunk. A shared callback decrements {@code pendingChunks} as chunks
   * complete and counts down the latch / signals the future when all are done.
   * A NOT_MY_VBUCKET status *increases* the pending count by the number parsed
   * from the status message — NOTE(review): presumably the retried vbucket
   * chunks; confirm against the operation factory's retry behavior.
   *
   * @param <T> the value type
   * @param keyIter iterator that produces keys
   * @param tcIter an iterator of transcoders to serialize and unserialize values;
   *     the transcoders are matched with the keys in the same order. The minimum
   *     of the key collection length and number of transcoders is used and no
   *     exception is thrown if they do not match
   * @return a Future result of that fetch
   * @throws IllegalStateException in the rare circumstance where queue is too
   *     full to accept any more requests
   */
  @Override
  public <T> BulkFuture<Map<String, T>> asyncGetBulk(Iterator<String> keyIter, Iterator<Transcoder<T>> tcIter) {
final Map < String , Future < T > > m = new ConcurrentHashMap < String , Future < T > > ( ) ; // This map does not need to be a ConcurrentHashMap // because it is fully populated when it is used and // used only to read the transcoder for a key . final Map < String , Transcoder < T > > tcMap = new HashMap < String , Transcoder < T > > ( ) ; // Break the gets down into groups by key final Map < MemcachedNode , Collection < String > > chunks = new HashMap < MemcachedNode , Collection < String > > ( ) ; final NodeLocator locator = mconn . getLocator ( ) ; while ( keyIter . hasNext ( ) && tcIter . hasNext ( ) ) { String key = keyIter . next ( ) ; tcMap . put ( key , tcIter . next ( ) ) ; StringUtils . validateKey ( key , opFact instanceof BinaryOperationFactory ) ; final MemcachedNode primaryNode = locator . getPrimary ( key ) ; MemcachedNode node = null ; if ( primaryNode . isActive ( ) ) { node = primaryNode ; } else { for ( Iterator < MemcachedNode > i = locator . getSequence ( key ) ; node == null && i . hasNext ( ) ; ) { MemcachedNode n = i . next ( ) ; if ( n . isActive ( ) ) { node = n ; } } if ( node == null ) { node = primaryNode ; } } assert node != null : "Didn't find a node for " + key ; Collection < String > ks = chunks . get ( node ) ; if ( ks == null ) { ks = new ArrayList < String > ( ) ; chunks . put ( node , ks ) ; } ks . add ( key ) ; } final AtomicInteger pendingChunks = new AtomicInteger ( chunks . size ( ) ) ; int initialLatchCount = chunks . isEmpty ( ) ? 0 : 1 ; final CountDownLatch latch = new CountDownLatch ( initialLatchCount ) ; final Collection < Operation > ops = new ArrayList < Operation > ( chunks . size ( ) ) ; final BulkGetFuture < T > rv = new BulkGetFuture < T > ( m , ops , latch , executorService ) ; GetOperation . Callback cb = new GetOperation . Callback ( ) { @ Override @ SuppressWarnings ( "synthetic-access" ) public void receivedStatus ( OperationStatus status ) { if ( status . getStatusCode ( ) == StatusCode . 
ERR_NOT_MY_VBUCKET ) { pendingChunks . addAndGet ( Integer . parseInt ( status . getMessage ( ) ) ) ; } rv . setStatus ( status ) ; } @ Override public void gotData ( String k , int flags , byte [ ] data ) { Transcoder < T > tc = tcMap . get ( k ) ; m . put ( k , tcService . decode ( tc , new CachedData ( flags , data , tc . getMaxSize ( ) ) ) ) ; } @ Override public void complete ( ) { if ( pendingChunks . decrementAndGet ( ) <= 0 ) { latch . countDown ( ) ; rv . signalComplete ( ) ; } } } ; // Now that we know how many servers it breaks down into , and the latch // is all set up , convert all of these strings collections to operations final Map < MemcachedNode , Operation > mops = new HashMap < MemcachedNode , Operation > ( ) ; for ( Map . Entry < MemcachedNode , Collection < String > > me : chunks . entrySet ( ) ) { Operation op = opFact . get ( me . getValue ( ) , cb ) ; mops . put ( me . getKey ( ) , op ) ; ops . add ( op ) ; } assert mops . size ( ) == chunks . size ( ) ; mconn . checkState ( ) ; mconn . addOperations ( mops ) ; return rv ;
  }
}
public class SchematronValidatingParser { /** * Checks the content of an XML entity against the applicable rules defined * in a Schematron schema . The designated phase identifies the active * patterns ( rule sets ) ; if not specified , the default phase is executed . * @ param xmlEntity * A DOM Document representing the XML entity to validate . * @ param schemaRef * A ( classpath ) reference to a Schematron 1.5 schema . * @ param phase * The phase to execute . * @ return A NodeList containing validation errors ( it may be empty ) . */ public NodeList validate ( Document xmlEntity , String schemaRef , String phase ) { } }
if ( xmlEntity == null || xmlEntity . getDocumentElement ( ) == null ) throw new IllegalArgumentException ( "No XML entity supplied (null)." ) ; InputSource xmlInputSource = null ; try { InputStream inputStream = DocumentToInputStream ( xmlEntity ) ; xmlInputSource = new InputSource ( inputStream ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; } PropertyMapBuilder builder = new PropertyMapBuilder ( ) ; SchematronProperty . DIAGNOSE . add ( builder ) ; if ( null != phase && ! phase . isEmpty ( ) ) { builder . put ( SchematronProperty . PHASE , phase ) ; } XmlErrorHandler errHandler = new XmlErrorHandler ( ) ; builder . put ( ValidateProperty . ERROR_HANDLER , errHandler ) ; ValidationDriver driver = createDriver ( builder . toPropertyMap ( ) ) ; InputStream schStream = this . getClass ( ) . getResourceAsStream ( schemaRef . trim ( ) ) ; try { InputSource input = new InputSource ( schStream ) ; try { boolean loaded = driver . loadSchema ( input ) ; if ( ! loaded ) { throw new Exception ( "Failed to load schema at " + schemaRef . trim ( ) + "\nIs the schema valid? Is the phase defined?" ) ; } } finally { schStream . close ( ) ; } driver . validate ( xmlInputSource ) ; } catch ( Exception e ) { throw new RuntimeException ( "Schematron validation failed." , e ) ; } NodeList errList = errHandler . toNodeList ( ) ; if ( LOGR . isLoggable ( Level . FINER ) ) { LOGR . finer ( String . format ( "Found %d Schematron rule violation(s):%n %s" , errList . getLength ( ) , errHandler . toString ( ) ) ) ; } return errList ;
public class GangliaContext { /** * Puts an integer into the buffer as 4 bytes , big - endian . */ private void xdr_int ( int i ) { } }
buffer [ offset ++ ] = ( byte ) ( ( i >> 24 ) & 0xff ) ; buffer [ offset ++ ] = ( byte ) ( ( i >> 16 ) & 0xff ) ; buffer [ offset ++ ] = ( byte ) ( ( i >> 8 ) & 0xff ) ; buffer [ offset ++ ] = ( byte ) ( i & 0xff ) ;
public class PersonGroupPersonsImpl { /** * Add a representative face to a person for identification . The input face is specified as an image with a targetFace rectangle . * @ param personGroupId Id referencing a particular person group . * @ param personId Id referencing a particular person . * @ param url Publicly reachable URL of an image * @ param addPersonFaceFromUrlOptionalParameter the object representing the optional parameters to be set before calling this API * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < PersistedFace > addPersonFaceFromUrlAsync ( String personGroupId , UUID personId , String url , AddPersonFaceFromUrlOptionalParameter addPersonFaceFromUrlOptionalParameter , final ServiceCallback < PersistedFace > serviceCallback ) { } }
return ServiceFuture . fromResponse ( addPersonFaceFromUrlWithServiceResponseAsync ( personGroupId , personId , url , addPersonFaceFromUrlOptionalParameter ) , serviceCallback ) ;
public class ThingGroupMetadataMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ThingGroupMetadata thingGroupMetadata , ProtocolMarshaller protocolMarshaller ) { } }
if ( thingGroupMetadata == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( thingGroupMetadata . getParentGroupName ( ) , PARENTGROUPNAME_BINDING ) ; protocolMarshaller . marshall ( thingGroupMetadata . getRootToParentThingGroups ( ) , ROOTTOPARENTTHINGGROUPS_BINDING ) ; protocolMarshaller . marshall ( thingGroupMetadata . getCreationDate ( ) , CREATIONDATE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class FirewallRulesInner {

    /**
     * Gets all firewall rules in the specified redis cache (one page).
     *
     * @param resourceGroupName the name of the resource group
     * @param cacheName the name of the Redis cache
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;RedisFirewallRuleInner&gt; object wrapped in {@link ServiceResponse} if successful
     */
    public Observable<ServiceResponse<Page<RedisFirewallRuleInner>>> listByRedisResourceSinglePageAsync(final String resourceGroupName, final String cacheName) {
        // Fail fast on missing required parameters before issuing any network call.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (cacheName == null) {
            throw new IllegalArgumentException("Parameter cacheName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // Issue the REST call, then deserialize the raw response into one page;
        // any deserialization/validation error is surfaced via Observable.error.
        return service.listByRedisResource(this.client.subscriptionId(), resourceGroupName, cacheName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<RedisFirewallRuleInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<RedisFirewallRuleInner>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PageImpl<RedisFirewallRuleInner>> result = listByRedisResourceDelegate(response);
                        // Re-wrap PageImpl as the public Page interface.
                        return Observable.just(new ServiceResponse<Page<RedisFirewallRuleInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class QueryUi {
  /**
   * GWT entry point: builds the entire query UI.
   *
   * <p>High-level phases (code below is unchanged, order matters):
   * <ol>
   *   <li>Asynchronously fetch the aggregator list, wire it into the first
   *       metric form, then do the initial graph refresh.</li>
   *   <li>Attach blur/keypress/click/value-change handlers so every widget
   *       change regenerates the graph.</li>
   *   <li>Configure validation regexps, lengths and defaults for the y/y2
   *       range, label, format and WxH fields.</li>
   *   <li>Lay out the From/To date boxes, the auto-reload toggle (which swaps
   *       the end-date box for an interval field), the metric tabs (the "+"
   *       tab clones a new metric form), and the Axes/Key/Style option tabs.</li>
   *   <li>Assemble the graph panel (image + zoom box + crosshair cursor
   *       handlers) and the Graph/Stats/Logs/Version main tabs, then attach
   *       everything to the "queryuimain" root panel.</li>
   * </ol>
   *
   * <p>NOTE(review): the field is consistently spelled "autoreoload_interval"
   * (sic) — declared elsewhere in this class, so the spelling cannot be fixed
   * here without touching its declaration and all uses.
   */
  public void onModuleLoad() {
asyncGetJson ( AGGREGATORS_URL , new GotJsonCallback ( ) { public void got ( final JSONValue json ) { // Do we need more manual type checking ? Not sure what will happen // in the browser if something other than an array is returned . final JSONArray aggs = json . isArray ( ) ; for ( int i = 0 ; i < aggs . size ( ) ; i ++ ) { aggregators . add ( aggs . get ( i ) . isString ( ) . stringValue ( ) ) ; } ( ( MetricForm ) metrics . getWidget ( 0 ) ) . setAggregators ( aggregators ) ; refreshFromQueryString ( ) ; refreshGraph ( ) ; } } ) ; // All UI elements need to regenerate the graph when changed . { final ValueChangeHandler < Date > vch = new ValueChangeHandler < Date > ( ) { public void onValueChange ( final ValueChangeEvent < Date > event ) { refreshGraph ( ) ; } } ; TextBox tb = start_datebox . getTextBox ( ) ; tb . addBlurHandler ( refreshgraph ) ; tb . addKeyPressHandler ( refreshgraph ) ; start_datebox . addValueChangeHandler ( vch ) ; tb = end_datebox . getTextBox ( ) ; tb . addBlurHandler ( refreshgraph ) ; tb . addKeyPressHandler ( refreshgraph ) ; end_datebox . addValueChangeHandler ( vch ) ; } autoreoload_interval . addBlurHandler ( refreshgraph ) ; autoreoload_interval . addKeyPressHandler ( refreshgraph ) ; yrange . addBlurHandler ( refreshgraph ) ; yrange . addKeyPressHandler ( refreshgraph ) ; y2range . addBlurHandler ( refreshgraph ) ; y2range . addKeyPressHandler ( refreshgraph ) ; ylog . addClickHandler ( new AdjustYRangeCheckOnClick ( ylog , yrange ) ) ; y2log . addClickHandler ( new AdjustYRangeCheckOnClick ( y2log , y2range ) ) ; ylog . addClickHandler ( refreshgraph ) ; y2log . addClickHandler ( refreshgraph ) ; ylabel . addBlurHandler ( refreshgraph ) ; ylabel . addKeyPressHandler ( refreshgraph ) ; y2label . addBlurHandler ( refreshgraph ) ; y2label . addKeyPressHandler ( refreshgraph ) ; yformat . addBlurHandler ( refreshgraph ) ; yformat . addKeyPressHandler ( refreshgraph ) ; y2format . addBlurHandler ( refreshgraph ) ; y2format . 
addKeyPressHandler ( refreshgraph ) ; wxh . addBlurHandler ( refreshgraph ) ; wxh . addKeyPressHandler ( refreshgraph ) ; global_annotations . addBlurHandler ( refreshgraph ) ; global_annotations . addKeyPressHandler ( refreshgraph ) ; horizontalkey . addClickHandler ( refreshgraph ) ; keybox . addClickHandler ( refreshgraph ) ; nokey . addClickHandler ( refreshgraph ) ; smooth . addClickHandler ( refreshgraph ) ; styles . addChangeHandler ( refreshgraph ) ; yrange . setValidationRegexp ( "^(" // Nothing or + "|\\[([-+.0-9eE]+|\\*)?" // " [ start + ":([-+.0-9eE]+|\\*)?\\])$" ) ; // : end ] " yrange . setVisibleLength ( 5 ) ; yrange . setMaxLength ( 44 ) ; // MAX = 2 ^ 26 = 20 chars : " [ - $ MAX : $ MAX ] " yrange . setText ( "[0:]" ) ; y2range . setValidationRegexp ( "^(" // Nothing or + "|\\[([-+.0-9eE]+|\\*)?" // " [ start + ":([-+.0-9eE]+|\\*)?\\])$" ) ; // : end ] " y2range . setVisibleLength ( 5 ) ; y2range . setMaxLength ( 44 ) ; // MAX = 2 ^ 26 = 20 chars : " [ - $ MAX : $ MAX ] " y2range . setText ( "[0:]" ) ; y2range . setEnabled ( false ) ; y2log . setEnabled ( false ) ; ylabel . setVisibleLength ( 10 ) ; ylabel . setMaxLength ( 50 ) ; // Arbitrary limit . y2label . setVisibleLength ( 10 ) ; y2label . setMaxLength ( 50 ) ; // Arbitrary limit . y2label . setEnabled ( false ) ; yformat . setValidationRegexp ( "^(|.*%..*)$" ) ; // Nothing or at least one % ? yformat . setVisibleLength ( 10 ) ; yformat . setMaxLength ( 16 ) ; // Arbitrary limit . y2format . setValidationRegexp ( "^(|.*%..*)$" ) ; // Nothing or at least one % ? y2format . setVisibleLength ( 10 ) ; y2format . setMaxLength ( 16 ) ; // Arbitrary limit . y2format . setEnabled ( false ) ; wxh . setValidationRegexp ( "^[1-9][0-9]{2,}x[1-9][0-9]{2,}$" ) ; // 100x100 wxh . setVisibleLength ( 9 ) ; wxh . setMaxLength ( 11 ) ; // 99999x99999 wxh . setText ( ( Window . getClientWidth ( ) - 20 ) + "x" + ( Window . getClientHeight ( ) * 4 / 5 ) ) ; final FlexTable table = new FlexTable ( ) ; table . 
setText ( 0 , 0 , "From" ) ; { final HorizontalPanel hbox = new HorizontalPanel ( ) ; hbox . add ( new InlineLabel ( "To" ) ) ; final Anchor now = new Anchor ( "(now)" ) ; now . addClickHandler ( new ClickHandler ( ) { public void onClick ( final ClickEvent event ) { end_datebox . setValue ( new Date ( ) ) ; refreshGraph ( ) ; } } ) ; hbox . add ( now ) ; hbox . add ( autoreload ) ; hbox . setWidth ( "100%" ) ; table . setWidget ( 0 , 1 , hbox ) ; } autoreload . addValueChangeHandler ( new ValueChangeHandler < Boolean > ( ) { @ Override public void onValueChange ( final ValueChangeEvent < Boolean > event ) { if ( autoreload . getValue ( ) ) { final HorizontalPanel hbox = new HorizontalPanel ( ) ; hbox . setWidth ( "100%" ) ; hbox . add ( new InlineLabel ( "Every:" ) ) ; hbox . add ( autoreoload_interval ) ; hbox . add ( new InlineLabel ( "seconds" ) ) ; table . setWidget ( 1 , 1 , hbox ) ; if ( autoreoload_interval . getValue ( ) . isEmpty ( ) ) { autoreoload_interval . setValue ( "15" ) ; } autoreoload_interval . setFocus ( true ) ; lastgraphuri = "" ; // Force refreshGraph . refreshGraph ( ) ; // Trigger the 1st auto - reload } else { table . setWidget ( 1 , 1 , end_datebox ) ; } } } ) ; autoreoload_interval . setValidationRegexp ( "^([5-9]|[1-9][0-9]+)$" ) ; // > = 5s autoreoload_interval . setMaxLength ( 4 ) ; autoreoload_interval . setVisibleLength ( 8 ) ; table . setWidget ( 1 , 0 , start_datebox ) ; table . setWidget ( 1 , 1 , end_datebox ) ; { final HorizontalPanel hbox = new HorizontalPanel ( ) ; hbox . add ( new InlineLabel ( "WxH:" ) ) ; hbox . add ( wxh ) ; table . setWidget ( 0 , 3 , hbox ) ; } { final HorizontalPanel hbox = new HorizontalPanel ( ) ; hbox . add ( global_annotations ) ; table . setWidget ( 0 , 4 , hbox ) ; } { addMetricForm ( "metric 1" , 0 ) ; metrics . selectTab ( 0 ) ; metrics . add ( new InlineLabel ( "Loading..." ) , "+" ) ; metrics . 
addBeforeSelectionHandler ( new BeforeSelectionHandler < Integer > ( ) { public void onBeforeSelection ( final BeforeSelectionEvent < Integer > event ) { final int item = event . getItem ( ) ; final int nitems = metrics . getWidgetCount ( ) ; if ( item == nitems - 1 ) { // Last item : the " + " was clicked . event . cancel ( ) ; final MetricForm metric = addMetricForm ( "metric " + nitems , item ) ; metrics . selectTab ( item ) ; metric . setFocus ( true ) ; } } } ) ; table . setWidget ( 2 , 0 , metrics ) ; } table . getFlexCellFormatter ( ) . setColSpan ( 2 , 0 , 2 ) ; table . getFlexCellFormatter ( ) . setRowSpan ( 1 , 3 , 2 ) ; final DecoratedTabPanel optpanel = new DecoratedTabPanel ( ) ; optpanel . add ( makeAxesPanel ( ) , "Axes" ) ; optpanel . add ( makeKeyPanel ( ) , "Key" ) ; optpanel . add ( makeStylePanel ( ) , "Style" ) ; optpanel . selectTab ( 0 ) ; table . setWidget ( 1 , 3 , optpanel ) ; table . getFlexCellFormatter ( ) . setColSpan ( 1 , 3 , 2 ) ; final DecoratorPanel decorator = new DecoratorPanel ( ) ; decorator . setWidget ( table ) ; final VerticalPanel graphpanel = new VerticalPanel ( ) ; graphpanel . add ( decorator ) ; { final VerticalPanel graphvbox = new VerticalPanel ( ) ; graphvbox . add ( graphstatus ) ; graph . setVisible ( false ) ; // Put the graph image element and the zoombox elements inside the absolute panel graphbox . add ( graph , 0 , 0 ) ; zoom_box . setVisible ( false ) ; graphbox . add ( zoom_box , 0 , 0 ) ; graph . addMouseOverHandler ( new MouseOverHandler ( ) { public void onMouseOver ( final MouseOverEvent event ) { final Style style = graphbox . getElement ( ) . getStyle ( ) ; style . setCursor ( Cursor . CROSSHAIR ) ; } } ) ; graph . addMouseOutHandler ( new MouseOutHandler ( ) { public void onMouseOut ( final MouseOutEvent event ) { final Style style = graphbox . getElement ( ) . getStyle ( ) ; style . setCursor ( Cursor . AUTO ) ; } } ) ; graphvbox . add ( graphbox ) ; graph . 
addErrorHandler ( new ErrorHandler ( ) { public void onError ( final ErrorEvent event ) { graphstatus . setText ( "Oops, failed to load the graph." ) ; } } ) ; graph . addLoadHandler ( new LoadHandler ( ) { public void onLoad ( final LoadEvent event ) { graphbox . setWidth ( graph . getWidth ( ) + "px" ) ; graphbox . setHeight ( graph . getHeight ( ) + "px" ) ; } } ) ; graphpanel . add ( graphvbox ) ; } final DecoratedTabPanel mainpanel = new DecoratedTabPanel ( ) ; mainpanel . setWidth ( "100%" ) ; mainpanel . add ( graphpanel , "Graph" ) ; mainpanel . add ( stats_table , "Stats" ) ; mainpanel . add ( logs , "Logs" ) ; mainpanel . add ( build_data , "Version" ) ; mainpanel . selectTab ( 0 ) ; mainpanel . addBeforeSelectionHandler ( new BeforeSelectionHandler < Integer > ( ) { public void onBeforeSelection ( final BeforeSelectionEvent < Integer > event ) { clearError ( ) ; final int item = event . getItem ( ) ; switch ( item ) { case 1 : refreshStats ( ) ; return ; case 2 : refreshLogs ( ) ; return ; case 3 : refreshVersion ( ) ; return ; } } } ) ; final VerticalPanel root = new VerticalPanel ( ) ; root . setWidth ( "100%" ) ; root . add ( current_error ) ; current_error . setVisible ( false ) ; current_error . addStyleName ( "dateBoxFormatError" ) ; root . add ( mainpanel ) ; RootPanel . get ( "queryuimain" ) . add ( root ) ; // Must be done at the end , once all the widgets are attached . ensureSameWidgetSize ( optpanel ) ; History . addHistoryListener ( this ) ;
  }
}
public class JMElasticsearchSearchAndCount { /** * Search all with target count search response . * @ param indices the indices * @ param types the types * @ param filterQueryBuilder the filter query builder * @ return the search response */ public SearchResponse searchAllWithTargetCount ( String [ ] indices , String [ ] types , QueryBuilder filterQueryBuilder ) { } }
return searchAllWithTargetCount ( indices , types , filterQueryBuilder , null ) ;
public class GetRequest { /** * < p > Convert this request into a url compatible with the WSAPI . < / p > * The current fetch and any other parameters will be included . * @ return the url representing this request . */ @ Override public String toUrl ( ) { } }
List < NameValuePair > params = new ArrayList < NameValuePair > ( getParams ( ) ) ; params . add ( new BasicNameValuePair ( "fetch" , fetch . toString ( ) ) ) ; return String . format ( "%s.js?%s" , getEndpoint ( ) , URLEncodedUtils . format ( params , "utf-8" ) ) ;
public class CommandDescriptorBlock {

    /**
     * Adds a {@link FieldPointerSenseKeySpecificData} object to {@link #illegalFieldPointers}.
     * Initializes and grows the array if necessary.
     *
     * @param illegalFieldPointer the object to add
     */
    private final void addIllegalFieldPointer(final FieldPointerSenseKeySpecificData illegalFieldPointer) {
        // Lazily create the backing array on first use.
        if (illegalFieldPointers == null) {
            illegalFieldPointers = new FieldPointerSenseKeySpecificData[10];
        }
        final int size = getIllegalFieldPointerSize();
        if (size >= illegalFieldPointers.length) {
            // FIX: grow geometrically (double) instead of by a single slot; the
            // original "+ 1" growth with a manual copy loop made repeated
            // insertion O(n^2) in array copies. Arrays.copyOf replaces the loop.
            illegalFieldPointers =
                java.util.Arrays.copyOf(illegalFieldPointers, illegalFieldPointers.length * 2);
        }
        // Append the new element at the current logical end.
        illegalFieldPointers[size] = illegalFieldPointer;
    }
}
public class MisoUtil {

    /**
     * Returns a polygon framing the specified scene footprint.
     *
     * <p>The polygon is a diamond in screen space: starting from the
     * top-center of the origin tile, the edges advance by half-tile widths and
     * heights per tile (isometric projection — the width axis moves down-right,
     * the height axis moves down-left; NOTE(review): inferred from the
     * half-tile metrics, confirm against MisoSceneMetrics docs).
     *
     * @param x the x tile coordinate of the "upper-left" of the footprint
     * @param y the y tile coordinate of the "upper-left" of the footprint
     * @param width the width in tiles of the footprint
     * @param height the height in tiles of the footprint
     */
    public static Polygon getFootprintPolygon(MisoSceneMetrics metrics, int x, int y, int width, int height) {
        SmartPolygon footprint = new SmartPolygon();
        // Screen position of the origin tile.
        Point tpos = MisoUtil.tileToScreen(metrics, x, y, new Point());
        // start with top-center point
        int rx = tpos.x + metrics.tilehwid, ry = tpos.y;
        footprint.addPoint(rx, ry);
        // right point: advance `width` tiles along the width axis
        rx += width * metrics.tilehwid;
        ry += width * metrics.tilehhei;
        footprint.addPoint(rx, ry);
        // bottom-center point: advance `height` tiles along the height axis
        rx -= height * metrics.tilehwid;
        ry += height * metrics.tilehhei;
        footprint.addPoint(rx, ry);
        // left point: back `width` tiles along the width axis
        rx -= width * metrics.tilehwid;
        ry -= width * metrics.tilehhei;
        footprint.addPoint(rx, ry);
        // end with top-center point (closes the diamond)
        rx += height * metrics.tilehwid;
        ry -= height * metrics.tilehhei;
        footprint.addPoint(rx, ry);
        return footprint;
    }
}
public class Dstream { /** * Iterates through cluster _ list and prints out each grid cluster therein as a string . * @ see moa . clusterers . dstream . Dstream . cluster _ list * @ see moa . clusterers . dstream . GridCluster . toString */ public void printGridClusters ( ) { } }
System . out . println ( "List of Clusters. Total " + this . cluster_list . size ( ) + "." ) ; for ( GridCluster gc : this . cluster_list ) { System . out . println ( gc . getClusterLabel ( ) + ": " + gc . getWeight ( ) + " {" + gc . toString ( ) + "}" ) ; }
public class ManagedClustersInner {

    /**
     * Gets an access profile of a managed cluster.
     * Gets the accessProfile for the specified role name of the managed cluster
     * with a specified resource group and name.
     *
     * @param resourceGroupName the name of the resource group
     * @param resourceName the name of the managed cluster resource
     * @param roleName the name of the role for managed cluster accessProfile resource
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the ManagedClusterAccessProfileInner object
     */
    public Observable<ServiceResponse<ManagedClusterAccessProfileInner>> getAccessProfileWithServiceResponseAsync(String resourceGroupName, String resourceName, String roleName) {
        // Fail fast on missing required parameters before issuing any network call.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (resourceName == null) {
            throw new IllegalArgumentException("Parameter resourceName is required and cannot be null.");
        }
        if (roleName == null) {
            throw new IllegalArgumentException("Parameter roleName is required and cannot be null.");
        }
        // API version is pinned for this operation rather than read from the client.
        final String apiVersion = "2019-02-01";
        // Issue the REST call, then deserialize; errors surface via Observable.error.
        return service.getAccessProfile(this.client.subscriptionId(), resourceGroupName, resourceName, roleName, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ManagedClusterAccessProfileInner>>>() {
                @Override
                public Observable<ServiceResponse<ManagedClusterAccessProfileInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<ManagedClusterAccessProfileInner> clientResponse = getAccessProfileDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class AbstractInstallPlanJob { /** * Install provided extension dependency . * @ param extensionDependency the extension dependency to install * @ param namespace the namespace where to install the extension * @ param parentBranch the children of the parent { @ link DefaultExtensionPlanNode } * @ param managedDependencies the managed dependencies * @ param parents the parents extensions ( which triggered this extension install ) * @ throws InstallException error when trying to install provided extension * @ throws ResolveException * @ throws IncompatibleVersionConstraintException */ private boolean installOptionalExtensionDependency ( ExtensionDependency extensionDependency , String namespace , List < ModifableExtensionPlanNode > parentBranch , Map < String , ExtensionDependency > managedDependencies , Set < String > parents ) { } }
// Save current plan List < ModifableExtensionPlanNode > dependencyBranch = new ArrayList < > ( ) ; try { installMandatoryExtensionDependency ( extensionDependency , namespace , dependencyBranch , managedDependencies , parents ) ; parentBranch . addAll ( dependencyBranch ) ; return true ; } catch ( Throwable e ) { if ( getRequest ( ) . isVerbose ( ) ) { this . logger . warn ( "Failed to install optional dependency [{}] with error: {}" , extensionDependency , ExceptionUtils . getRootCauseMessage ( e ) ) ; } } return false ;
public class Regex { /** * Concatenates the given regex with this one . * @ param regex the regex to concatenate with this one * @ return the regex */ public Regex then ( Regex regex ) { } }
if ( regex == null ) { return this ; } return re ( this . pattern + regex . pattern ) ;
public class WonderPush {

    /**
     * Returns the last known location from the {@link LocationManager},
     * or null if permission was not given or no suitable fix exists.
     *
     * <p>Scans every provider's last known location and keeps the "best" one.
     * A candidate is discarded when it is null, has a non-positive or very poor
     * accuracy (&gt;= 10000 m), is older than the session gap, is more than two
     * minutes older than the current best, or is less precise than the current
     * best. Predicate order matters: each check assumes the earlier ones passed.
     */
    protected static Location getLocation() {
        Context applicationContext = getApplicationContext();
        if (applicationContext == null)
            return null;
        LocationManager locationManager = (LocationManager) applicationContext.getSystemService(Context.LOCATION_SERVICE);
        try {
            Location best = null;
            for (String provider : locationManager.getAllProviders()) {
                Location location;
                try {
                    location = locationManager.getLastKnownLocation(provider);
                } catch (SecurityException ex) {
                    // No permission for this particular provider; try the others.
                    continue;
                }
                // If this location is null, discard
                if (null == location) {
                    continue;
                }
                // If no, broken or poor accuracy (<= 0 or >= 10 km radius), discard
                if (location.getAccuracy() <= 0 || location.getAccuracy() >= 10000) {
                    continue;
                }
                // Skip locations old enough to belong to an older session
                if (location.getTime() < System.currentTimeMillis() - WonderPush.DIFFERENT_SESSION_REGULAR_MIN_TIME_GAP) {
                    continue;
                }
                // If we have no best yet, use this first location
                if (null == best) {
                    best = location;
                    continue;
                }
                // If this location is more than 2 minutes older than the current best, discard
                if (location.getTime() < best.getTime() - 2 * 60 * 1000) {
                    continue;
                }
                // If this location is less precise (i.e. has a *larger* accuracy radius), discard
                if (location.getAccuracy() > best.getAccuracy()) {
                    continue;
                }
                best = location;
            }
            return best;
        } catch (java.lang.SecurityException e) {
            // Missing permission: treat as "no location available".
            return null;
        }
    }
}
public class Pairs { /** * Joins the given list of encoded pairs using nameValSep for name / value separator and pairSep for pair separator . * @ param writer where the joined pairs are written * @ param pairs list of encoded pair to join * @ param pairSep character to use to join pairs * @ param nameValSep character to use to join name / value for each pair * @ throws IOException If an exception occures while writing to the writer */ public static void join ( Writer writer , List < ? extends EncodedPair > pairs , char pairSep , char nameValSep ) throws IOException { } }
join ( writer , pairs , pairSep , nameValSep , false , false ) ;
public class KoanReader { /** * Gets solution for a koan by method name . * @ param koanClass the koan class * @ param methodName the method name of the solution required * @ return the solution content to be inserted between the koan start and end markers */ public static String getSolutionFromFile ( Class < ? > koanClass , String methodName ) { } }
return getSourceFromFile ( koanClass , methodName , SOLUTION_EXTENSION ) ;
public class CPRuleLocalServiceBaseImpl { /** * Creates a new cp rule with the primary key . Does not add the cp rule to the database . * @ param CPRuleId the primary key for the new cp rule * @ return the new cp rule */ @ Override @ Transactional ( enabled = false ) public CPRule createCPRule ( long CPRuleId ) { } }
return cpRulePersistence . create ( CPRuleId ) ;
public class AbstractJaxWsWebEndpoint { /** * { @ inheritDoc } */ @ Override public void invoke ( HttpServletRequest request , HttpServletResponse response ) throws ServletException { } }
try { updateDestination ( request ) ; destination . invoke ( servletConfig , servletConfig . getServletContext ( ) , request , response ) ; } catch ( IOException e ) { throw new ServletException ( e ) ; }
public class NonBlockingHashMapLong { /** * Returns a { @ link Collection } view of the values contained in this map . * The collection is backed by the map , so changes to the map are reflected * in the collection , and vice - versa . The collection supports element * removal , which removes the corresponding mapping from this map , via the * < tt > Iterator . remove < / tt > , < tt > Collection . remove < / tt > , * < tt > removeAll < / tt > , < tt > retainAll < / tt > , and < tt > clear < / tt > operations . * It does not support the < tt > add < / tt > or < tt > addAll < / tt > operations . * < p > The view ' s < tt > iterator < / tt > is a " weakly consistent " iterator that * will never throw { @ link ConcurrentModificationException } , and guarantees * to traverse elements as they existed upon construction of the iterator , * and may ( but is not guaranteed to ) reflect any modifications subsequent * to construction . */ public Collection < TypeV > values ( ) { } }
return new AbstractCollection < TypeV > ( ) { public void clear ( ) { NonBlockingHashMapLong . this . clear ( ) ; } public int size ( ) { return NonBlockingHashMapLong . this . size ( ) ; } public boolean contains ( Object v ) { return NonBlockingHashMapLong . this . containsValue ( v ) ; } public Iterator < TypeV > iterator ( ) { return new SnapshotV ( ) ; } } ;
public class AbstractConnectionStrategy { /** * After obtaining a connection , perform additional tasks . * @ param handle * @ param statsObtainTime */ protected void postConnection ( ConnectionHandle handle , long statsObtainTime ) { } }
handle . renewConnection ( ) ; // mark it as being logically " open " // Give an application a chance to do something with it . if ( handle . getConnectionHook ( ) != null ) { handle . getConnectionHook ( ) . onCheckOut ( handle ) ; } if ( this . pool . closeConnectionWatch ) { // a debugging tool this . pool . watchConnection ( handle ) ; } if ( this . pool . statisticsEnabled ) { this . pool . statistics . addCumulativeConnectionWaitTime ( System . nanoTime ( ) - statsObtainTime ) ; }
public class SharingPeer { /** * Send a message to the peer . * Delivery of the message can only happen if the peer is connected . * @ param message The message to send to the remote peer through our peer * exchange . */ public void send ( PeerMessage message ) throws IllegalStateException { } }
// Review notes (code unchanged):
// - The send is asynchronous: the message buffer is rewound and queued on
//   connectionManager.offerWrite with a 1-second offer timeout; delivery is
//   only observed through the WriteListener callbacks.
// - onWriteFailed: a null Throwable is logged at INFO; a
//   ConnectionClosedException is logged at DEBUG and unbinds the peer
//   (NOTE(review): the meaning of the boolean passed to unbind(true) is not
//   visible here — confirm against unbind()); any other failure is logged
//   with warn-and-debug details. onWriteDone is intentionally a no-op.
// - If the peer is not connected, the message is silently dropped (traced)
//   and the peer is unbound.
logger . trace ( "Sending msg {} to {}" , message . getType ( ) , this ) ; if ( this . isConnected ( ) ) { ByteBuffer data = message . getData ( ) ; data . rewind ( ) ; connectionManager . offerWrite ( new WriteTask ( socketChannel , data , new WriteListener ( ) { @ Override public void onWriteFailed ( String message , Throwable e ) { if ( e == null ) { logger . info ( message ) ; } else if ( e instanceof ConnectionClosedException ) { logger . debug ( message , e ) ; unbind ( true ) ; } else { LoggerUtils . warnAndDebugDetails ( logger , message , e ) ; } } @ Override public void onWriteDone ( ) { } } ) , 1 , TimeUnit . SECONDS ) ; } else { logger . trace ( "Attempting to send a message to non-connected peer {}!" , this ) ; unbind ( true ) ; }
public class l3param { /** * Use this API to unset the properties of l3param resource . * Properties that need to be unset are specified in args array . */ public static base_response unset ( nitro_service client , l3param resource , String [ ] args ) throws Exception { } }
l3param unsetresource = new l3param ( ) ; return unsetresource . unset_resource ( client , args ) ;
public class AmazonEC2Client { /** * Describes the specified Spot Instance requests . * You can use < code > DescribeSpotInstanceRequests < / code > to find a running Spot Instance by examining the response . * If the status of the Spot Instance is < code > fulfilled < / code > , the instance ID appears in the response and * contains the identifier of the instance . Alternatively , you can use < a > DescribeInstances < / a > with a filter to * look for instances where the instance lifecycle is < code > spot < / code > . * We recommend that you set < code > MaxResults < / code > to a value between 5 and 1000 to limit the number of results * returned . This paginates the output , which makes the list more manageable and returns the results faster . If the * list of results exceeds your < code > MaxResults < / code > value , then that number of results is returned along with a * < code > NextToken < / code > value that can be passed to a subsequent < code > DescribeSpotInstanceRequests < / code > request * to retrieve the remaining results . * Spot Instance requests are deleted four hours after they are canceled and their instances are terminated . * @ param describeSpotInstanceRequestsRequest * Contains the parameters for DescribeSpotInstanceRequests . * @ return Result of the DescribeSpotInstanceRequests operation returned by the service . * @ sample AmazonEC2 . DescribeSpotInstanceRequests * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / DescribeSpotInstanceRequests " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DescribeSpotInstanceRequestsResult describeSpotInstanceRequests ( DescribeSpotInstanceRequestsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeSpotInstanceRequests ( request ) ;
public class MultisetJsonDeserializer { /** * @ param deserializer { @ link JsonDeserializer } used to deserialize the objects inside the { @ link Multiset } . * @ param < T > Type of the elements inside the { @ link Multiset } * @ return a new instance of { @ link MultisetJsonDeserializer } */ public static < T > MultisetJsonDeserializer < T > newInstance ( JsonDeserializer < T > deserializer ) { } }
return new MultisetJsonDeserializer < T > ( deserializer ) ;
public class ApnsServiceBuilder { /** * Specify the certificate used to connect to Apple APNS * servers . This relies on the path ( absolute or relative to * working path ) to the keystore ( * . p12 ) containing the * certificate , along with the given password . * The keystore needs to be of PKCS12 and the keystore * needs to be encrypted using the SunX509 algorithm . Both * of these settings are the default . * This library does not support password - less p12 certificates , due to a * Oracle Java library < a href = " http : / / bugs . sun . com / bugdatabase / view _ bug . do ? bug _ id = 6415637 " > * Bug 6415637 < / a > . There are three workarounds : use a password - protected * certificate , use a different boot Java SDK implementation , or construct * the ` SSLContext ` yourself ! Needless to say , the password - protected * certificate is most recommended option . * @ param fileName the path to the certificate * @ param password the password of the keystore * @ return this * @ throws RuntimeIOException if it { @ code fileName } cannot be * found or read * @ throws InvalidSSLConfig if fileName is invalid Keystore * or the password is invalid */ public ApnsServiceBuilder withCert ( String fileName , String password ) throws RuntimeIOException , InvalidSSLConfig { } }
FileInputStream stream = null ; try { stream = new FileInputStream ( fileName ) ; return withCert ( stream , password ) ; } catch ( FileNotFoundException e ) { throw new RuntimeIOException ( e ) ; } finally { Utilities . close ( stream ) ; }
public class CacheImpl { /** * / * ( non - Javadoc ) * @ see com . ibm . jaggr . service . cache . ICache # dump ( java . io . Writer , java . util . regex . Pattern ) */ @ Override public void dump ( Writer writer , Pattern filter ) throws IOException { } }
_layerCache . dump ( writer , filter ) ; _moduleCache . dump ( writer , filter ) ; _gzipCache . dump ( writer , filter ) ; for ( IGenericCache cache : _namedCaches . values ( ) ) { cache . dump ( writer , filter ) ; }
public class LdapTemplate { /** * { @ inheritDoc } */ @ Override public void modifyAttributes ( DirContextOperations ctx ) { } }
Name dn = ctx . getDn ( ) ; if ( dn != null && ctx . isUpdateMode ( ) ) { modifyAttributes ( dn , ctx . getModificationItems ( ) ) ; } else { throw new IllegalStateException ( "The DirContextOperations instance needs to be properly initialized." ) ; }
public class GuiRenderer { /** * Starts clipping an area to prevent drawing outside of it . * @ param area the area */ public void startClipping ( ClipArea area ) { } }
if ( area . noClip ( ) ) return ; GL11 . glPushAttrib ( GL11 . GL_SCISSOR_BIT ) ; GL11 . glEnable ( GL11 . GL_SCISSOR_TEST ) ; int f = ignoreScale ? 1 : scaleFactor ; int x = area . x * f ; int y = Minecraft . getMinecraft ( ) . displayHeight - ( area . y + area . height ( ) ) * f ; int w = area . width ( ) * f ; int h = area . height ( ) * f ; ; GL11 . glScissor ( x , y , w , h ) ;
public class RowToObjectMapper { /** * Build the structures necessary to do the mapping * @ throws SQLException on error . */ protected void getFieldMappings ( ) throws SQLException { } }
// Review notes (code unchanged):
// - Builds _fields/_fieldTypes so column i of the ResultSet maps to a public
//   setter or public field of _returnTypeClass; matching is case-insensitive
//   (column keys and member names are upper-cased before lookup).
// - Setters take precedence over fields: public methods are scanned first, and
//   a field may only fill a slot that is still null afterwards.
// - Two setter overloads for the same property cannot be disambiguated and
//   raise a ControlException rather than guessing.
// - Fields are collected up the superclass chain (fix for 8813); note that
//   despite that comment's wording, only public, non-static fields are
//   actually accepted by the loop's modifier checks.
// - Any column left unmapped raises a ControlException naming the column.
// - The arrays are sized _columnCount + 1 because JDBC columns are 1-based.
final String [ ] keys = getKeysFromResultSet ( ) ; // find fields or setters for return class HashMap < String , AccessibleObject > mapFields = new HashMap < String , AccessibleObject > ( _columnCount * 2 ) ; for ( int i = 1 ; i <= _columnCount ; i ++ ) { mapFields . put ( keys [ i ] , null ) ; } // public methods Method [ ] classMethods = _returnTypeClass . getMethods ( ) ; for ( Method m : classMethods ) { if ( isSetterMethod ( m ) ) { final String fieldName = m . getName ( ) . substring ( 3 ) . toUpperCase ( ) ; if ( mapFields . containsKey ( fieldName ) ) { // check for overloads Object field = mapFields . get ( fieldName ) ; if ( field == null ) { mapFields . put ( fieldName , m ) ; } else { throw new ControlException ( "Unable to choose between overloaded methods " + m . getName ( ) + " on the " + _returnTypeClass . getName ( ) + " class. Mapping is done using " + "a case insensitive comparision of SQL ResultSet columns to field " + "names and public setter methods on the return class." ) ; } } } } // fix for 8813 : include inherited and non - public fields for ( Class clazz = _returnTypeClass ; clazz != null && clazz != Object . class ; clazz = clazz . getSuperclass ( ) ) { Field [ ] classFields = clazz . getDeclaredFields ( ) ; for ( Field f : classFields ) { if ( Modifier . isStatic ( f . getModifiers ( ) ) ) continue ; if ( ! Modifier . isPublic ( f . getModifiers ( ) ) ) continue ; String fieldName = f . getName ( ) . toUpperCase ( ) ; if ( ! mapFields . containsKey ( fieldName ) ) continue ; Object field = mapFields . get ( fieldName ) ; if ( field == null ) { mapFields . put ( fieldName , f ) ; } } } // finally actually init the fields array _fields = new AccessibleObject [ _columnCount + 1 ] ; _fieldTypes = new int [ _columnCount + 1 ] ; for ( int i = 1 ; i < _fields . length ; i ++ ) { AccessibleObject f = mapFields . 
get ( keys [ i ] ) ; if ( f == null ) { throw new ControlException ( "Unable to map the SQL column " + keys [ i ] + " to a field on the " + _returnTypeClass . getName ( ) + " class. Mapping is done using a case insensitive comparision of SQL ResultSet " + "columns to field names and public setter methods on the return class." ) ; } _fields [ i ] = f ; if ( f instanceof Field ) { _fieldTypes [ i ] = _tmf . getTypeId ( ( ( Field ) f ) . getType ( ) ) ; } else { _fieldTypes [ i ] = _tmf . getTypeId ( ( ( Method ) f ) . getParameterTypes ( ) [ 0 ] ) ; } }
public class HttpConnectionInterceptorContext { /** * Retrieve the state object associated with the specified interceptor instance and property * name on this request context . * @ param interceptor the interceptor instance * @ param stateName the name key that the state object was stored under * @ param stateType class of the type the stored state should be returned as * @ param < T > the type the stored state should be returned as * @ return the stored state object * @ see # setState ( HttpConnectionInterceptor , String , Object ) * @ since 2.6.0 */ public < T > T getState ( HttpConnectionInterceptor interceptor , String stateName , Class < T > stateType ) { } }
Map < String , Object > state = interceptorStates . get ( interceptor ) ; if ( state != null ) { return stateType . cast ( state . get ( stateName ) ) ; } else { return null ; }
public class TagLibraryCache { /** * called from AbstractJspModC , no need to cache */ public void loadTldsFromJar ( URL url , String resourcePath , List loadedLocations , JspXmlExtConfig webAppConfig ) { } }
// Review notes (code unchanged):
// - Scans every META-INF/*.tld entry of the jar behind 'url' that is not already
//   listed in loadedLocations (keyed as resourcePath + "/" + name), parses it,
//   and registers the resulting TagLibraryInfoImpl.
// - The registration key is the TLD's declared URN when present and non-blank;
//   otherwise resourcePath + "/" + name is used so listener-only TLDs (516822)
//   are still loaded.
// - A TLD is only registered when its URI is not already present in the web
//   app's taglib map (or, when webAppConfig is null, in this cache); duplicates
//   are skipped. Parsed event listeners and tags are accumulated as well.
// - originatorId is derived from the jar file name: the text before ".jar" and
//   after the last file separator, then name-mangled. NOTE(review): indexOf
//   would return -1 for a name without ".jar" — confirm callers only pass
//   jar: URLs to real .jar files.
// - URL-connection caching is disabled before the jar is opened.
// - Per-TLD parse failures (JspCoreException) are logged at WARNING and do not
//   abort the scan; the outer catch swallows any other failure for the whole
//   jar the same way. The stream and jar file are closed in the finally block.
if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINE ) ) { logger . logp ( Level . FINE , CLASS_NAME , "loadTldsFromJar" , "url [" + url + "]" + "resourcePath [" + resourcePath + "] loadedLocations [" + loadedLocations + "] webAppConfig [" + webAppConfig + "]" ) ; } JarFile jarFile = null ; InputStream stream = null ; String name = null ; try { JarURLConnection conn = ( JarURLConnection ) url . openConnection ( ) ; conn . setUseCaches ( false ) ; jarFile = conn . getJarFile ( ) ; String originatorId = jarFile . getName ( ) ; originatorId = originatorId . substring ( 0 , originatorId . indexOf ( ".jar" ) ) ; if ( originatorId . indexOf ( File . separatorChar ) != - 1 ) originatorId = originatorId . substring ( originatorId . lastIndexOf ( File . separatorChar ) + 1 ) ; originatorId = NameMangler . mangleString ( originatorId ) ; Enumeration entries = jarFile . entries ( ) ; while ( entries . hasMoreElements ( ) ) { JarEntry entry = ( JarEntry ) entries . nextElement ( ) ; name = entry . getName ( ) ; if ( name . startsWith ( "META-INF/" ) && name . endsWith ( ".tld" ) && loadedLocations . contains ( resourcePath + "/" + name ) == false ) { stream = jarFile . getInputStream ( entry ) ; JspInputSource tldInputSource = getInputSource ( container , name , null , url ) ; // TagLibraryInfoImpl tli = loadSerializedTld ( tldInputSource , inputSource ) ; // if ( tli = = null ) { try { TagLibraryInfoImpl tli = tldParser . parseTLD ( tldInputSource , stream , originatorId ) ; // 516822 - If no URI is defined in the tag , we still want to load it in case it has listeners // use the resourcePath + " / " + name as the key String uri = null ; if ( tli . getReliableURN ( ) != null && tli . getReliableURN ( ) . trim ( ) . equals ( "" ) == false ) { uri = tli . getReliableURN ( ) ; } else { uri = resourcePath + "/" + name ; } // if ( tli . getReliableURN ( ) ! = null & & tli . getReliableURN ( ) . trim ( ) . 
equals ( " " ) = = false ) { tli . setURI ( uri ) ; if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINE ) ) { logger . logp ( Level . FINE , CLASS_NAME , "loadTldsFromJar" , "webAppConfig is " + webAppConfig ) ; } if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINE ) && webAppConfig != null ) { logger . logp ( Level . FINE , CLASS_NAME , "loadTldsFromJar" , "tli URN is " + uri + " :webAppConfig.getTagLibMap() is " + webAppConfig . getTagLibMap ( ) + " :webAppConfig.getTagLibMap().containsKey(uri) is " + webAppConfig . getTagLibMap ( ) . containsKey ( uri ) + " :containsKey(uri) is " + containsKey ( uri ) ) ; } if ( ( webAppConfig != null && webAppConfig . getTagLibMap ( ) . containsKey ( uri ) == false ) || ( webAppConfig == null && containsKey ( uri ) == false ) ) { if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINE ) ) { logger . logp ( Level . FINE , CLASS_NAME , "loadTldsFromJar" , "jar tld loaded for {0}" , uri ) ; } put ( uri , tli ) ; // serializeTld ( tldInputSource , tli ) ; eventListenerList . addAll ( tldParser . getEventListenerList ( ) ) ; tagListForInjection . addAll ( tldParser . getParsedTagsList ( ) ) ; if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINE ) ) { logger . logp ( Level . FINE , CLASS_NAME , "loadTldsFromJar" , "tldParser.getEventListenerList() [" + tldParser . getEventListenerList ( ) + "]" ) ; } } } catch ( JspCoreException e ) { if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . WARNING ) ) { logger . logp ( Level . WARNING , CLASS_NAME , "loadTldsFromJar" , "jsp error failed to load tld in jar. uri = [" + resourcePath + "]" , e ) ; } } stream . close ( ) ; stream = null ; } } } catch ( Exception e ) { if ( com . ibm . ejs . ras . TraceComponent . 
isAnyTracingEnabled ( ) && logger . isLoggable ( Level . WARNING ) ) { logger . logp ( Level . WARNING , CLASS_NAME , "loadTldsFromJar" , "jsp error failed to load tld in jar. uri = [" + resourcePath + "]" , e ) ; } } finally { if ( stream != null ) { try { stream . close ( ) ; } catch ( Throwable t ) { } } if ( jarFile != null ) { try { jarFile . close ( ) ; } catch ( Throwable t ) { } } }
public class SignalFxNamingConvention { /** * Metric ( the metric name ) can be any non - empty UTF - 8 string , with a maximum length < = 256 characters */ @ Override public String name ( String name , Meter . Type type , @ Nullable String baseUnit ) { } }
String formattedName = StringEscapeUtils . escapeJson ( delegate . name ( name , type , baseUnit ) ) ; return StringUtils . truncate ( formattedName , NAME_MAX_LENGTH ) ;
public class AbstractProject { /** * Checks if there ' s any update in SCM , and returns true if any is found . * The implementation is responsible for ensuring mutual exclusion between polling and builds * if necessary . * @ since 1.345 */ public PollingResult poll ( TaskListener listener ) { } }
// Review notes (code unchanged):
// - Short-circuits to NO_CHANGES when there is no SCM configured, when the
//   project is not buildable, or when an SCMDecisionHandler vetoes polling;
//   each case is reported to the listener.
// - With no build at all: returns BUILD_NOW unless the project is already
//   queued (then NO_CHANGES, to avoid piling up).
// - pollingBaseline is lazily recovered from the newest build carrying an
//   SCMRevisionState action, searching no further back than the last
//   successful build; if none exists it is computed later (data written by
//   older Hudson versions — see the NOTE-NO-BASELINE comment).
// - _poll does the real work, bracketed by SCMPollListener fireBeforePolling /
//   firePollingSuccess. AbortException, IOException and InterruptedException
//   are reported (firePollingFailed) and degrade to NO_CHANGES, while
//   RuntimeException and Error are reported and re-thrown.
SCM scm = getScm ( ) ; if ( scm == null ) { listener . getLogger ( ) . println ( Messages . AbstractProject_NoSCM ( ) ) ; return NO_CHANGES ; } if ( ! isBuildable ( ) ) { listener . getLogger ( ) . println ( Messages . AbstractProject_Disabled ( ) ) ; return NO_CHANGES ; } SCMDecisionHandler veto = SCMDecisionHandler . firstShouldPollVeto ( this ) ; if ( veto != null ) { listener . getLogger ( ) . println ( Messages . AbstractProject_PollingVetoed ( veto ) ) ; return NO_CHANGES ; } R lb = getLastBuild ( ) ; if ( lb == null ) { listener . getLogger ( ) . println ( Messages . AbstractProject_NoBuilds ( ) ) ; return isInQueue ( ) ? NO_CHANGES : BUILD_NOW ; } if ( pollingBaseline == null ) { R success = getLastSuccessfulBuild ( ) ; // if we have a persisted baseline , we ' ll find it by this for ( R r = lb ; r != null ; r = r . getPreviousBuild ( ) ) { SCMRevisionState s = r . getAction ( SCMRevisionState . class ) ; if ( s != null ) { pollingBaseline = s ; break ; } if ( r == success ) break ; // searched far enough } // NOTE - NO - BASELINE : // if we don ' t have baseline yet , it means the data is built by old Hudson that doesn ' t set the baseline // as action , so we need to compute it . This happens later . } try { SCMPollListener . fireBeforePolling ( this , listener ) ; PollingResult r = _poll ( listener , scm ) ; SCMPollListener . firePollingSuccess ( this , listener , r ) ; return r ; } catch ( AbortException e ) { listener . getLogger ( ) . println ( e . getMessage ( ) ) ; listener . fatalError ( Messages . AbstractProject_Aborted ( ) ) ; LOGGER . log ( Level . FINE , "Polling " + this + " aborted" , e ) ; SCMPollListener . firePollingFailed ( this , listener , e ) ; return NO_CHANGES ; } catch ( IOException e ) { Functions . printStackTrace ( e , listener . fatalError ( e . getMessage ( ) ) ) ; SCMPollListener . firePollingFailed ( this , listener , e ) ; return NO_CHANGES ; } catch ( InterruptedException e ) { Functions . printStackTrace ( e , listener . 
fatalError ( Messages . AbstractProject_PollingABorted ( ) ) ) ; SCMPollListener . firePollingFailed ( this , listener , e ) ; return NO_CHANGES ; } catch ( RuntimeException e ) { SCMPollListener . firePollingFailed ( this , listener , e ) ; throw e ; } catch ( Error e ) { SCMPollListener . firePollingFailed ( this , listener , e ) ; throw e ; }
public class PersistentExecutorImpl { /** * Find all task IDs for tasks that match the specified partition id and the presence or absence * ( as determined by the inState attribute ) of the specified state . * For example , to find taskIDs for the first 100 tasks in partition 12 that have not completed all executions , * taskStore . findTaskIds ( 12 , TaskState . ENDED , false , null , 100 ) ; * This method is for the mbean only . * @ param partition identifier of the partition in which to search for tasks . * @ param state a task state . For example , TaskState . SCHEDULED * @ param inState indicates whether to include or exclude results with the specified state * @ param minId minimum value for task id to be returned in the results . A null value means no minimum . * @ param maxResults limits the number of results to return to the specified maximum value . A null value means no limit . * @ return in - memory , ordered list of task ID . * @ throws Exception if an error occurs when attempting to access the persistent task store . */ Long [ ] findTaskIds ( long partition , TaskState state , boolean inState , Long minId , Integer maxResults ) throws Exception { } }
// Review notes (code unchanged):
// - The store query runs inside a TransactionController preInvoke/postInvoke
//   bracket; any Throwable from the query is recorded via setFailure and is
//   surfaced (as an Exception) by postInvoke in the finally block.
// - NOTE(review): if a failure was recorded but postInvoke returned null, this
//   method would return null instead of throwing — presumably postInvoke
//   always reports the recorded failure; confirm against TransactionController.
Long [ ] results = null ; TransactionController tranController = new TransactionController ( ) ; try { tranController . preInvoke ( ) ; List < Long > ids = taskStore . findTaskIds ( null , null , state , inState , minId , maxResults , null , partition ) ; results = ids . toArray ( new Long [ ids . size ( ) ] ) ; } catch ( Throwable x ) { tranController . setFailure ( x ) ; } finally { Exception x = tranController . postInvoke ( Exception . class ) ; if ( x != null ) throw x ; } return results ;
public class GridFile { /** * Verifies whether child is a child ( dir or file ) of parent * @ param parent * @ param child * @ return True if child is a child , false otherwise */ protected static boolean isChildOf ( String parent , String child ) { } }
if ( parent == null || child == null ) return false ; if ( ! child . startsWith ( ( parent . endsWith ( SEPARATOR ) ? parent : parent + SEPARATOR ) ) ) return false ; if ( child . length ( ) <= parent . length ( ) ) return false ; int from = parent . equals ( SEPARATOR ) ? parent . length ( ) : parent . length ( ) + 1 ; // if ( from - 1 > child . length ( ) ) // return false ; String [ ] comps = Util . components ( child . substring ( from ) , SEPARATOR ) ; return comps != null && comps . length <= 1 ;
public class GrpcSerializationUtils { /** * Creates a service definition that uses custom marshallers . * @ param service the service to intercept * @ param marshallers a map that specifies which marshaller to use for each method * @ return the new service definition */ public static ServerServiceDefinition overrideMethods ( final ServerServiceDefinition service , final Map < MethodDescriptor , MethodDescriptor > marshallers ) { } }
List < ServerMethodDefinition < ? , ? > > newMethods = new ArrayList < ServerMethodDefinition < ? , ? > > ( ) ; List < MethodDescriptor < ? , ? > > newDescriptors = new ArrayList < MethodDescriptor < ? , ? > > ( ) ; // intercepts the descriptors for ( final ServerMethodDefinition < ? , ? > definition : service . getMethods ( ) ) { ServerMethodDefinition < ? , ? > newMethod = interceptMethod ( definition , marshallers ) ; newDescriptors . add ( newMethod . getMethodDescriptor ( ) ) ; newMethods . add ( newMethod ) ; } // builds the new service descriptor final ServerServiceDefinition . Builder serviceBuilder = ServerServiceDefinition . builder ( new ServiceDescriptor ( service . getServiceDescriptor ( ) . getName ( ) , newDescriptors ) ) ; // creates the new service definition for ( ServerMethodDefinition < ? , ? > definition : newMethods ) { serviceBuilder . addMethod ( definition ) ; } return serviceBuilder . build ( ) ;
public class CachedConfigurationSource { /** * Get configuration set for a given { @ code environment } from the cache . For cache to be seeded * you have to call the { @ link # reload ( Environment ) } method before calling this method . Otherwise * the method will throw { @ link MissingEnvironmentException } . * @ param environment environment to use * @ return configuration set for { @ code environment } * @ throws MissingEnvironmentException when there ' s no config for the given environment in the cache */ @ Override public Properties getConfiguration ( Environment environment ) { } }
if ( cachedConfigurationPerEnvironment . containsKey ( environment . getName ( ) ) ) { return cachedConfigurationPerEnvironment . get ( environment . getName ( ) ) ; } else { throw new MissingEnvironmentException ( environment . getName ( ) ) ; }
public class IdentifierType { /** * Returns true if the requested property is set ; false , otherwise . * @ return * returned object is { @ link boolean } */ public boolean isSet ( String propName ) { } }
if ( propName . equals ( "uniqueId" ) ) { return isSetUniqueId ( ) ; } if ( propName . equals ( "uniqueName" ) ) { return isSetUniqueName ( ) ; } if ( propName . equals ( "externalId" ) ) { return isSetExternalId ( ) ; } if ( propName . equals ( "externalName" ) ) { return isSetExternalName ( ) ; } if ( propName . equals ( "repositoryId" ) ) { return isSetRepositoryId ( ) ; } return false ;
public class Calendar { /** * Fires the given calendar event to all event handlers currently registered * with this calendar . * @ param evt the event to fire */ public final void fireEvent ( CalendarEvent evt ) { } }
if ( fireEvents && ! batchUpdates ) { if ( MODEL . isLoggable ( FINER ) ) { MODEL . finer ( getName ( ) + ": fireing event: " + evt ) ; // $ NON - NLS - 1 $ } requireNonNull ( evt ) ; Event . fireEvent ( this , evt ) ; }
public class PropertyWriterImpl { /** * { @ inheritDoc } */ public Content getPropertyDocTreeHeader ( MethodDoc property , Content propertyDetailsTree ) { } }
propertyDetailsTree . addContent ( writer . getMarkerAnchor ( property . name ( ) ) ) ; Content propertyDocTree = writer . getMemberTreeHeader ( ) ; Content heading = new HtmlTree ( HtmlConstants . MEMBER_HEADING ) ; heading . addContent ( property . name ( ) . substring ( 0 , property . name ( ) . lastIndexOf ( "Property" ) ) ) ; propertyDocTree . addContent ( heading ) ; return propertyDocTree ;
public class MongoService { /** * Get a Mongo DB instance , authenticated with the specified user and password if specified . * @ param databaseName the database name . * @ return com . mongodb . DB instance * @ throws Exception if an error occurs . */ @ FFDCIgnore ( InvocationTargetException . class ) Object getDB ( String databaseName ) throws Exception { } }
// Review notes (code unchanged):
// - Lazy init of mongoClient under a read/write lock pair: the read lock is
//   released, the write lock taken for init() (with a re-check), then the read
//   lock is re-acquired before the write lock is released (downgrade) for the
//   rest of the method; the outer finally releases the read lock.
// - All driver calls go through cached reflective handles (MongoClient_getDB,
//   DB_isAuthenticated, DB_authenticate) rather than compile-time references.
// - Password auth: skipped if already authenticated. The password is decoded
//   when it is in a recognized crypto form. A failed authenticate() call is
//   re-checked with isAuthenticated() because another thread may have won the
//   race; Mongo's "can't authenticate twice" IllegalStateException (delivered
//   via InvocationTargetException) is treated the same way.
// - Certificate auth (useCertAuth) needs no action here: the client was
//   constructed with the credential already, as the comment explains.
// - The outer catch unwraps InvocationTargetException and rethrows so the
//   original failure is captured in FFDC/trace automatically.
final boolean trace = TraceComponent . isAnyTracingEnabled ( ) ; lock . readLock ( ) . lock ( ) ; try { if ( mongoClient == null ) { // Switch to write lock for lazy initialization lock . readLock ( ) . unlock ( ) ; lock . writeLock ( ) . lock ( ) ; try { if ( mongoClient == null ) init ( ) ; } finally { // Downgrade to read lock for rest of method lock . readLock ( ) . lock ( ) ; lock . writeLock ( ) . unlock ( ) ; } } Object db = MongoClient_getDB . invoke ( mongoClient , databaseName ) ; // authentication String user = ( String ) props . get ( USER ) ; if ( user != null ) { if ( ( Boolean ) DB_isAuthenticated . invoke ( db ) ) { if ( trace && tc . isDebugEnabled ( ) ) Tr . debug ( this , tc , "already authenticated" ) ; } else { if ( trace && tc . isDebugEnabled ( ) ) Tr . debug ( this , tc , "authenticate as: " + user ) ; SerializableProtectedString password = ( SerializableProtectedString ) props . get ( PASSWORD ) ; String pwdStr = password == null ? null : String . valueOf ( password . getChars ( ) ) ; pwdStr = PasswordUtil . getCryptoAlgorithm ( pwdStr ) == null ? pwdStr : PasswordUtil . decode ( pwdStr ) ; char [ ] pwdChars = pwdStr == null ? null : pwdStr . toCharArray ( ) ; try { if ( ! ( Boolean ) DB_authenticate . invoke ( db , user , pwdChars ) ) if ( ( Boolean ) DB_isAuthenticated . invoke ( db ) ) { if ( trace && tc . isDebugEnabled ( ) ) Tr . debug ( this , tc , "another thread must have authenticated first" ) ; } else throw new IllegalArgumentException ( Tr . formatMessage ( tc , "CWKKD0012.authentication.error" , MONGO , id , databaseName ) ) ; } catch ( InvocationTargetException x ) { // If already authenticated , Mongo raises : // IllegalStateException : can ' t authenticate twice on the same database // Maybe another thread did the authentication right after we checked , so check again . Throwable cause = x . getCause ( ) ; if ( cause instanceof IllegalStateException && ( Boolean ) DB_isAuthenticated . invoke ( db ) ) { if ( trace && tc . 
isDebugEnabled ( ) ) Tr . debug ( this , tc , "another thread must have authenticated first" , cause ) ; } else throw cause ; } } } else if ( useCertAuth ) { // If we specified a certificate we will already have used the client constructor that // specified the credential so if we have got to here we are already authenticated and // JIT should remove this so it will not be an overhead . } return db ; } catch ( Throwable x ) { // rethrowing the exception allows it to be captured in FFDC and traced automatically x = x instanceof InvocationTargetException ? x . getCause ( ) : x ; if ( x instanceof Exception ) throw ( Exception ) x ; else if ( x instanceof Error ) throw ( Error ) x ; else throw new RuntimeException ( x ) ; } finally { lock . readLock ( ) . unlock ( ) ; }
public class CommerceUserSegmentEntryLocalServiceBaseImpl { /** * Returns a range of all the commerce user segment entries . * Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link com . liferay . portal . kernel . dao . orm . QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link com . liferay . portal . kernel . dao . orm . QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link com . liferay . commerce . user . segment . model . impl . CommerceUserSegmentEntryModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order . * @ param start the lower bound of the range of commerce user segment entries * @ param end the upper bound of the range of commerce user segment entries ( not inclusive ) * @ return the range of commerce user segment entries */ @ Override public List < CommerceUserSegmentEntry > getCommerceUserSegmentEntries ( int start , int end ) { } }
return commerceUserSegmentEntryPersistence . findAll ( start , end ) ;
public class BufferedWriter { /** * Resets the output stream for a new connection . */ public void reset ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { // 306998.15 Tr . debug ( tc , "reset" ) ; } out = null ; // obs = null ; count = 0 ; total = 0L ; limit = - 1L ; length = - 1L ; committed = false ; _hasWritten = false ; _hasFlushed = false ; response = null ;
public class StateUtils { /** * Does nothing if the user has disabled the SecretKey cache . This is * useful when dealing with a JCA provider whose SecretKey * implementation is not thread safe . * Instantiates a SecretKey instance based upon what the user has * specified in the deployment descriptor . The SecretKey is then * stored in application scope where it can be used for all requests . */ public static void initSecret ( ServletContext ctx ) { } }
if ( ctx == null ) { throw new NullPointerException ( "ServletContext ctx" ) ; } if ( log . isLoggable ( Level . FINE ) ) { log . fine ( "Storing SecretKey @ " + INIT_SECRET_KEY_CACHE ) ; } // Create and store SecretKey on application scope String cache = ctx . getInitParameter ( INIT_SECRET_KEY_CACHE ) ; if ( cache == null ) { cache = ctx . getInitParameter ( INIT_SECRET_KEY_CACHE . toLowerCase ( ) ) ; } if ( ! "false" . equals ( cache ) ) { String algorithm = findAlgorithm ( ctx ) ; // you want to create this as few times as possible ctx . setAttribute ( INIT_SECRET_KEY_CACHE , new SecretKeySpec ( findSecret ( ctx , algorithm ) , algorithm ) ) ; } if ( log . isLoggable ( Level . FINE ) ) { log . fine ( "Storing SecretKey @ " + INIT_MAC_SECRET_KEY_CACHE ) ; } String macCache = ctx . getInitParameter ( INIT_MAC_SECRET_KEY_CACHE ) ; if ( macCache == null ) { macCache = ctx . getInitParameter ( INIT_MAC_SECRET_KEY_CACHE . toLowerCase ( ) ) ; } if ( ! "false" . equals ( macCache ) ) { String macAlgorithm = findMacAlgorithm ( ctx ) ; // init mac secret and algorithm ctx . setAttribute ( INIT_MAC_SECRET_KEY_CACHE , new SecretKeySpec ( findMacSecret ( ctx , macAlgorithm ) , macAlgorithm ) ) ; }
public class Xsd2CobolTypesModelBuilder { /** * Retrieve the properties of an alphanumeric type . * @ param facets the XSD facets * @ return the properties of an alphanumeric type */ private < T extends Number > Map < String , Object > getCobolAlphanumType ( List < XmlSchemaFacet > facets ) { } }
Map < String , Object > props = new LinkedHashMap < String , Object > ( ) ; props . put ( COBOL_TYPE_NAME_PROP_NAME , "CobolStringType" ) ; props . put ( CHAR_NUM_PROP_NAME , getMaxLength ( facets ) ) ; props . put ( JAVA_TYPE_NAME_PROP_NAME , getShortTypeName ( String . class ) ) ; return props ;
public class Compare {
    /**
     * Compare the MovieDB object with a title and year.
     *
     * @param moviedb the moviedb object to compare to
     * @param title the title of the movie to compare
     * @param year the year of the movie to compare
     * @param maxDistance the Levenshtein distance between the two titles; 0 = exact match
     * @param caseSensitive true if the comparison is to be case sensitive
     * @return true if there is a match, false otherwise
     */
    public static boolean movies(final MovieInfo moviedb, final String title, final String year, int maxDistance, boolean caseSensitive) {
        if ((moviedb == null) || (StringUtils.isBlank(title))) {
            return false;
        }
        String primaryTitle, firstCompareTitle, secondCompareTitle;
        if (caseSensitive) {
            primaryTitle = title;
            firstCompareTitle = moviedb.getOriginalTitle();
            secondCompareTitle = moviedb.getTitle();
        } else {
            // NOTE(review): title/original-title assignment order is swapped
            // relative to the case-sensitive branch; both are compared either
            // way so the result should be the same — confirm intent. Also
            // assumes getTitle()/getOriginalTitle() are non-null here.
            primaryTitle = title.toLowerCase();
            firstCompareTitle = moviedb.getTitle().toLowerCase();
            secondCompareTitle = moviedb.getOriginalTitle().toLowerCase();
        }
        if (isValidYear(year) && isValidYear(moviedb.getReleaseDate())) {
            // Compare with year: the release date's leading YEAR_LENGTH chars
            // must match exactly, in addition to the title match.
            String movieYear = moviedb.getReleaseDate().substring(0, YEAR_LENGTH);
            return movieYear.equals(year) && compareTitles(primaryTitle, firstCompareTitle, secondCompareTitle, maxDistance);
        }
        // Compare without year
        return compareTitles(primaryTitle, firstCompareTitle, secondCompareTitle, maxDistance);
    }
}
public class ULocale { /** * < strong > [ icu ] < / strong > Returns a locale ' s country localized for display in the provided locale . * < b > Warning : < / b > this is for the region part of a valid locale ID ; it cannot just be the region code ( like " FR " ) . * To get the display name for a region alone , or for other options , use { @ link LocaleDisplayNames } instead . * This is a cover for the ICU4C API . * @ param localeID the id of the locale whose country will be displayed * @ param displayLocaleID the id of the locale in which to display the name . * @ return the localized country name . */ public static String getDisplayCountry ( String localeID , String displayLocaleID ) { } }
return getDisplayCountryInternal ( new ULocale ( localeID ) , new ULocale ( displayLocaleID ) ) ;
public class MongoDBBasicOperations { /** * Delete the object against the given primary key . * @ param primaryID * the primary key * @ return the record that was removed */ @ Override public T delete ( X primaryID ) { } }
if ( primaryID == null ) { return null ; } T entity = get ( primaryID ) ; if ( entity == null ) { return null ; } this . mongoTemplate . remove ( entity ) ; return entity ;
public class ExecutionEntity {
    /**
     * Restores a complete process instance tree including referenced entities.
     *
     * @param executions the list of all executions that are part of this process
     *          instance. Cannot be null, must include the process instance
     *          execution itself.
     * @param eventSubscriptions the list of all event subscriptions that are
     *          linked to executions which are part of this process instance.
     *          If null, event subscriptions are not initialized and lazy loaded
     *          on demand.
     * @param variables the list of all variables that are linked to executions
     *          which are part of this process instance. If null, variables are
     *          not initialized and are lazy loaded on demand.
     * @param jobs jobs to re-attach to their executions; may be null
     * @param tasks tasks to re-attach to their executions; may be null
     * @param incidents incidents to re-attach to their executions; may be null
     * @param externalTasks external tasks to re-attach; may be null
     */
    public void restoreProcessInstance(Collection<ExecutionEntity> executions,
            Collection<EventSubscriptionEntity> eventSubscriptions,
            Collection<VariableInstanceEntity> variables,
            Collection<TaskEntity> tasks,
            Collection<JobEntity> jobs,
            Collection<IncidentEntity> incidents,
            Collection<ExternalTaskEntity> externalTasks) {
        // Restoring is only valid on the process-instance (root) execution.
        if (!isProcessInstanceExecution()) {
            throw LOG.restoreProcessInstanceException(this);
        }
        // index executions by id
        Map<String, ExecutionEntity> executionsMap = new HashMap<>();
        for (ExecutionEntity execution : executions) {
            executionsMap.put(execution.getId(), execution);
        }
        // Group variables by the id of the scope (execution or task) they belong to.
        Map<String, List<VariableInstanceEntity>> variablesByScope = new HashMap<>();
        if (variables != null) {
            for (VariableInstanceEntity variable : variables) {
                CollectionUtil.addToMapOfLists(variablesByScope, variable.getVariableScopeId(), variable);
            }
        }
        // restore execution tree
        for (ExecutionEntity execution : executions) {
            // Materialize child/subscription lists so they are not lazily re-fetched.
            if (execution.executions == null) {
                execution.executions = new ArrayList<>();
            }
            if (execution.eventSubscriptions == null && eventSubscriptions != null) {
                execution.eventSubscriptions = new ArrayList<>();
            }
            if (variables != null) {
                execution.variableStore.setVariablesProvider(
                    new VariableCollectionProvider<>(variablesByScope.get(execution.id)));
            }
            String parentId = execution.getParentId();
            ExecutionEntity parent = executionsMap.get(parentId);
            if (!execution.isProcessInstanceExecution()) {
                // Every non-root execution must resolve its parent within the batch.
                if (parent == null) {
                    throw LOG.resolveParentOfExecutionFailedException(parentId, execution.getId());
                }
                execution.processInstance = this;
                execution.parent = parent;
                if (parent.executions == null) {
                    parent.executions = new ArrayList<>();
                }
                parent.executions.add(execution);
            } else {
                // The root execution is its own process instance.
                execution.processInstance = execution;
            }
        }
        if (eventSubscriptions != null) {
            // add event subscriptions to the right executions in the tree
            for (EventSubscriptionEntity eventSubscription : eventSubscriptions) {
                ExecutionEntity executionEntity = executionsMap.get(eventSubscription.getExecutionId());
                if (executionEntity != null) {
                    executionEntity.addEventSubscription(eventSubscription);
                } else {
                    throw LOG.executionNotFoundException(eventSubscription.getExecutionId());
                }
            }
        }
        if (jobs != null) {
            for (JobEntity job : jobs) {
                ExecutionEntity execution = executionsMap.get(job.getExecutionId());
                job.setExecution(execution);
            }
        }
        if (tasks != null) {
            for (TaskEntity task : tasks) {
                ExecutionEntity execution = executionsMap.get(task.getExecutionId());
                task.setExecution(execution);
                execution.addTask(task);
                if (variables != null) {
                    // Task-scoped variables are keyed by the task id.
                    task.variableStore.setVariablesProvider(
                        new VariableCollectionProvider<>(variablesByScope.get(task.id)));
                }
            }
        }
        if (incidents != null) {
            for (IncidentEntity incident : incidents) {
                ExecutionEntity execution = executionsMap.get(incident.getExecutionId());
                incident.setExecution(execution);
            }
        }
        if (externalTasks != null) {
            for (ExternalTaskEntity externalTask : externalTasks) {
                ExecutionEntity execution = executionsMap.get(externalTask.getExecutionId());
                externalTask.setExecution(execution);
                execution.addExternalTask(externalTask);
            }
        }
    }
}
public class Polygon {
    /**
     * Creates a Polygon from the given perimeter and holes.
     *
     * @param perimeter the perimeter {@link LinearRing}
     * @param holes the holes {@link LinearRing} Stream (single-use, like the
     *          stream itself)
     * @return Polygon
     */
    public static Polygon of(LinearRing perimeter, Stream<LinearRing> holes) {
        // The `::iterator` method reference adapts the mapped Stream into the
        // Iterable shape expected by addLinearPositions; the perimeter is added
        // first, followed by each hole's positions.
        return new Polygon(AreaPositions.builder()
            .addLinearPosition(perimeter.positions())
            .addLinearPositions(holes.map(LinearRing::positions)::iterator)
            .build());
    }
}
public class AbstractBigtableAdmin {
    /**
     * Creates a snapshot from an existing table. NOTE: Cloud Bigtable has a
     * cleanup policy.
     *
     * @param snapshotName a {@link String} object
     * @param tableName a {@link TableName} object
     * @return a {@link Operation} object
     * @throws IOException if the snapshot operation fails
     */
    protected Operation snapshotTable(String snapshotName, TableName tableName) throws IOException {
        SnapshotTableRequest.Builder requestBuilder = SnapshotTableRequest.newBuilder()
            .setCluster(getSnapshotClusterName().toString())
            .setSnapshotId(snapshotName)
            .setName(options.getInstanceName().toTableNameStr(tableName.getNameAsString()));
        // TTL is optional: only set when a positive value is configured; the
        // default of -1 leaves the request's TTL unset.
        int ttlSecs = configuration.getInt(BigtableOptionsFactory.BIGTABLE_SNAPSHOT_DEFAULT_TTL_SECS_KEY, -1);
        if (ttlSecs > 0) {
            requestBuilder.setTtl(Duration.newBuilder().setSeconds(ttlSecs).build());
        }
        ApiFuture<Operation> future = tableAdminClientWrapper.snapshotTableAsync(requestBuilder.build());
        // Wait for the async result, surfacing failures as IOException.
        return Futures.getChecked(future, IOException.class);
    }
}
public class StickyValueHandler {
    /**
     * Set the field that owns this handler.
     * Order is significant: when detaching (owner == null) the current value is
     * saved to the cache before the owner reference is cleared; when attaching,
     * the value is retrieved only after the new owner has been set.
     *
     * @param owner the field this listener was added to (or null if being removed)
     */
    public void setOwner(ListenerOwner owner) {
        if (owner == null) {
            // Detaching: persist the current value into the record-owner cache.
            this.saveValue(m_recordOwnerCache);
            m_recordOwnerCache = null;
        }
        super.setOwner(owner);
        if (owner != null)
            // Attaching: restore the sticky value into the new owner.
            this.retrieveValue();
    }
}
public class BottomSheet { /** * Initializes the bottom sheet ' s root view . */ private void inflateRootView ( ) { } }
ViewGroup contentView = findViewById ( android . R . id . content ) ; contentView . removeAllViews ( ) ; LayoutInflater layoutInflater = LayoutInflater . from ( getContext ( ) ) ; rootView = ( DraggableView ) layoutInflater . inflate ( R . layout . bottom_sheet , contentView , false ) ; rootView . setCallback ( this ) ; contentView . addView ( rootView , createRootViewLayoutParams ( ) ) ;
public class Utils {
    /**
     * Helper method for translating (_Dx, _Dy) scroll vectors into scalar
     * rotation of a circle.
     *
     * @param _Dx the X component of the current scroll vector
     * @param _Dy the Y component of the current scroll vector
     * @param _X the X position of the current touch, relative to the circle center
     * @param _Y the Y position of the current touch, relative to the circle center
     * @return the scalar representing the change in angular position for this scroll
     */
    public static float vectorToScalarScroll(float _Dx, float _Dy, float _X, float _Y) {
        // Magnitude of the scroll vector.
        float magnitude = (float) Math.sqrt(_Dx * _Dx + _Dy * _Dy);
        // Sign comes from projecting the scroll vector onto the direction
        // perpendicular to the touch position (-_Y, _X).
        float perpX = -_Y;
        float perpY = _X;
        float direction = Math.signum(perpX * _Dx + perpY * _Dy);
        return magnitude * direction;
    }
}
public class GCCBEZImpl {
    /**
     * <!-- begin-user-doc -->
     * EMF-generated reflective setter; avoid hand-editing this method.
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case AfplibPackage.GCCBEZ__RG:
                // Replace the RG list contents wholesale with the new collection.
                getRg().clear();
                getRg().addAll((Collection<? extends GCCBEZRG>) newValue);
                return;
        }
        // Unknown feature ids are delegated to the superclass.
        super.eSet(featureID, newValue);
    }
}
public class JoinTypeUtil { /** * Gets { @ link FlinkJoinType } of the input Join RelNode . */ public static FlinkJoinType getFlinkJoinType ( Join join ) { } }
if ( join instanceof SemiJoin ) { // TODO supports ANTI return FlinkJoinType . SEMI ; } else { return toFlinkJoinType ( join . getJoinType ( ) ) ; }
public class XmlUtils { /** * Removes any whitespace from the text nodes that are descendants * of the given node . Any text node that becomes empty due to this * ( that is , any text node that only contained whitespaces ) will * be removed . * @ param node The node to remove whitespaces from */ static void removeWhitespace ( Node node ) { } }
NodeList childList = node . getChildNodes ( ) ; List < Node > toRemove = new ArrayList < Node > ( ) ; for ( int i = 0 ; i < childList . getLength ( ) ; i ++ ) { Node child = childList . item ( i ) ; if ( child . getNodeType ( ) == Node . TEXT_NODE ) { String text = child . getTextContent ( ) ; String trimmed = text . trim ( ) ; if ( trimmed . isEmpty ( ) ) { toRemove . add ( child ) ; } else if ( trimmed . length ( ) < text . length ( ) ) { child . setTextContent ( trimmed ) ; } } removeWhitespace ( child ) ; } for ( Node c : toRemove ) { node . removeChild ( c ) ; }
public class CommerceAddressPersistenceImpl { /** * Returns the first commerce address in the ordered set where groupId = & # 63 ; and classNameId = & # 63 ; and classPK = & # 63 ; . * @ param groupId the group ID * @ param classNameId the class name ID * @ param classPK the class pk * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the first matching commerce address , or < code > null < / code > if a matching commerce address could not be found */ @ Override public CommerceAddress fetchByG_C_C_First ( long groupId , long classNameId , long classPK , OrderByComparator < CommerceAddress > orderByComparator ) { } }
List < CommerceAddress > list = findByG_C_C ( groupId , classNameId , classPK , 0 , 1 , orderByComparator ) ; if ( ! list . isEmpty ( ) ) { return list . get ( 0 ) ; } return null ;
public class SeleniumGridListener {
    /**
     * Closes the selenium session when the suite has finished running.
     *
     * @see org.testng.ISuiteListener#onFinish(org.testng.ISuite)
     */
    @Override
    public void onFinish(ISuite suite) {
        logger.entering(suite);
        // Honor listener exclusion: skip teardown when this listener is
        // disabled for the current method.
        if (ListenerManager.isCurrentMethodSkipped(this)) {
            logger.exiting(ListenerManager.THREAD_EXCLUSION_MSG);
            return;
        }
        LocalGridManager.shutDownHub();
        logger.exiting();
    }
}
public class PartitionImpl { /** * If not already created , a new < code > mapper < / code > element with the given value will be created . * Otherwise , the existing < code > mapper < / code > element will be returned . * @ return a new or existing instance of < code > PartitionMapper < Partition < T > > < / code > */ public PartitionMapper < Partition < T > > getOrCreateMapper ( ) { } }
Node node = childNode . getOrCreate ( "mapper" ) ; PartitionMapper < Partition < T > > mapper = new PartitionMapperImpl < Partition < T > > ( this , "mapper" , childNode , node ) ; return mapper ;
public class HashPartition {
    /**
     * Prepares this partition for the probe phase. For a spilled partition this
     * opens the channel writer and output view that the probe-side records will
     * be written to; an in-memory partition needs no preparation.
     *
     * @param ioAccess the I/O manager used to create the block channel writer
     * @param probeChannelEnumerator supplies the next channel id for the probe side
     * @param bufferReturnQueue queue that written buffers are returned to
     * @throws IOException if the channel writer cannot be created
     */
    public void prepareProbePhase(IOManager ioAccess, Channel.Enumerator probeChannelEnumerator,
            LinkedBlockingQueue<MemorySegment> bufferReturnQueue) throws IOException {
        // Nothing to spill when the partition fits in memory.
        if (isInMemory()) {
            return;
        }
        // ATTENTION: The following lines are duplicated code from finalizeBuildPhase
        this.probeSideChannel = ioAccess.createBlockChannelWriter(probeChannelEnumerator.next(), bufferReturnQueue);
        this.probeSideBuffer = new ChannelWriterOutputView(this.probeSideChannel, this.memorySegmentSize);
    }
}
public class ESigList { /** * Returns the count of items of the specified type . * @ param eSigType The esignature type . * @ return Count of items of specified type . */ public int getCount ( IESigType eSigType ) { } }
int result = 0 ; for ( ESigItem item : items ) { if ( item . getESigType ( ) . equals ( eSigType ) ) { result ++ ; } } return result ;
public class GedWriter { /** * Loop through the lines from the line creator and write them to the * stream . * @ param stream the stream to write to * @ param charset the string encoding to use * @ throws IOException if there is a problem writing to the stream */ private void writeTheLines ( final BufferedOutputStream stream , final String charset ) throws IOException { } }
for ( final GedWriterLine line : visitor . getLines ( ) ) { if ( line . getLine ( ) . isEmpty ( ) ) { continue ; } final String string = line . getLine ( ) ; stream . write ( string . getBytes ( charset ) ) ; stream . write ( '\n' ) ; }
public class BaseTemplate {
    /**
     * Renders an XML declaration header. If the declaration encoding is set in
     * the {@link TemplateConfiguration#getDeclarationEncoding() template
     * configuration}, then the encoding is rendered into the declaration.
     *
     * @return this template instance
     * @throws IOException if writing to the output fails
     */
    public BaseTemplate xmlDeclaration() throws IOException {
        out.write("<?xml ");
        writeAttribute("version", "1.0");
        if (configuration.getDeclarationEncoding() != null) {
            // NOTE(review): the leading space in " encoding" appears intentional —
            // it separates this attribute from the preceding version attribute in
            // the rendered declaration. Confirm against writeAttribute's contract.
            writeAttribute(" encoding", configuration.getDeclarationEncoding());
        }
        out.write("?>");
        out.write(configuration.getNewLineString());
        return this;
    }
}
public class ChangeObjects {
    /**
     * Replaces, within the given MonomerNotationList, every occurrence of the
     * existing monomer id with the new monomer id.
     *
     * @param object MonomerNotationList to change
     * @param existingMonomerID monomer to change
     * @param newMonomerID new monomer
     * @return a new MonomerNotationList if the old MonomerID occurred, null otherwise
     * @throws NotationException if notation is not valid
     * @throws ChemistryException if chemistry could not be initialized
     * @throws CTKException general ChemToolKit exception passed to HELMToolKit
     * @throws MonomerLoadingException could not load monomers from source
     */
    public final static MonomerNotationList replaceMonomerNotationList(MonomerNotationList object,
            String existingMonomerID, String newMonomerID)
            throws NotationException, ChemistryException, CTKException, MonomerLoadingException {
        MonomerNotationList newObject = null;
        boolean hasChanged = false;
        // Rebuilds the dot-separated notation string unit by unit.
        StringBuilder sb = new StringBuilder();
        String id = "";
        for (MonomerNotation element : object.getListofMonomerUnits()) {
            if (element instanceof MonomerNotationUnitRNA) {
                // RNA units are handled by a helper that returns [newId, changeMarker];
                // a non-null second entry signals that a replacement happened.
                List<String> result = generateIDForNucleotide(((MonomerNotationUnitRNA) element), existingMonomerID, newMonomerID);
                id = result.get(0);
                if (result.get(1) != null) {
                    hasChanged = true;
                }
            } else {
                if (element.getUnit().equals(existingMonomerID)) {
                    hasChanged = true;
                    id = generateIDMonomerNotation(newMonomerID, element.getCount(), element.getAnnotation());
                } else {
                    // Unchanged unit: regenerate its id as-is to keep the notation uniform.
                    id = generateIDMonomerNotation(element.getUnit(), element.getCount(), element.getAnnotation());
                }
            }
            sb.append(id + ".");
        }
        if (hasChanged) {
            // Drop the trailing '.' separator before building the new list.
            sb.setLength(sb.length() - 1);
            newObject = new MonomerNotationList(sb.toString(), object.getType());
            newObject.setCount(object.getCount());
            if (object.isAnnotationTrue()) {
                newObject.setAnnotation(object.getAnnotation());
            }
        }
        // null means "no occurrence of existingMonomerID was found".
        return newObject;
    }
}
public class Backend { /** * transaction is created . */ public static < T > T read ( Class < T > clazz , Object id ) { } }
return execute ( new ReadEntityTransaction < T > ( clazz , id ) ) ;
public class LookupManagerImpl { /** * { @ inheritDoc } */ @ Override public List < ServiceInstance > getAllInstances ( ) throws ServiceException { } }
if ( ! isStarted ) { ServiceDirectoryError error = new ServiceDirectoryError ( ErrorCode . SERVICE_DIRECTORY_MANAGER_FACTORY_CLOSED ) ; throw new ServiceException ( error ) ; } List < ServiceInstance > instances = null ; List < ModelServiceInstance > allInstances = getLookupService ( ) . getAllInstances ( ) ; for ( ModelServiceInstance serviceInstance : allInstances ) { if ( instances == null ) { instances = new ArrayList < ServiceInstance > ( ) ; } instances . add ( ServiceInstanceUtils . transferFromModelServiceInstance ( serviceInstance ) ) ; } if ( instances == null ) { return Collections . emptyList ( ) ; } return instances ;
public class Gradient {
    /**
     * Set a knot blend type.
     *
     * @param n the knot index
     * @param type the knot blend type
     * @see #getKnotBlend
     */
    public void setKnotBlend(int n, int type) {
        // Replace only the blend bits of the knot's type byte, keeping the
        // other flag bits intact, then rebuild the cached gradient.
        knotTypes[n] = (byte) ((knotTypes[n] & ~BLEND_MASK) | type);
        rebuildGradient();
    }
}
public class ClientRegistry {
    /**
     * Add the client to the registry, unless the id is already registered.
     *
     * @param id the client identifier
     * @param client the client to register
     */
    private void addClient(String id, IClient client) {
        // check to see if the id already exists first
        // NOTE(review): this check-then-act is not atomic; if `clients` is
        // accessed from multiple threads a concurrent registration could race —
        // confirm whether callers serialize access or `clients` is concurrent.
        if (!hasClient(id)) {
            clients.put(id, client);
        } else {
            log.debug("Client id: {} already registered", id);
        }
    }
}
public class ConnectionManager {
    /**
     * Returns a boolean value indicating whether or not CCI local transaction
     * support is provided by the resource adapter. The adapter metadata is only
     * consulted when the configured transaction support level is XA or Local.
     *
     * @param mcf the managed connection factory to interrogate
     * @return true if the adapter supports CCI local transaction demarcation
     * @throws ResourceException if the connection factory or metadata lookup fails
     */
    private boolean raSupportsCCILocalTran(ManagedConnectionFactory mcf) throws ResourceException {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
        ConnectionFactory cf;
        ResourceAdapterMetaData raMetaData;
        boolean cciLocalTranSupported = false;
        if (isTraceOn && tc.isEntryEnabled())
            Tr.entry(this, tc, "raSupportsCCILocalTran");
        if (gConfigProps.transactionSupport == TransactionSupportLevel.XATransaction
                || gConfigProps.transactionSupport == TransactionSupportLevel.LocalTransaction) {
            cf = (ConnectionFactory) mcf.createConnectionFactory(this);
            raMetaData = cf.getMetaData();
            // Metadata may legitimately be absent; default stays false.
            if (raMetaData != null)
                cciLocalTranSupported = raMetaData.supportsLocalTransactionDemarcation();
        }
        if (isTraceOn && tc.isEntryEnabled())
            Tr.exit(this, tc, "raSupportsCCILocalTran " + cciLocalTranSupported);
        return cciLocalTranSupported;
    }
}
public class InternalSARLLexer {
    /**
     * $ANTLR start "RULE_COMMENT_RICH_TEXT_END"
     * Generated by ANTLR from InternalSARL.g (line 48779) — do not hand-edit;
     * regenerate from the grammar instead.
     */
    public final void mRULE_COMMENT_RICH_TEXT_END() throws RecognitionException {
        try {
            int _type = RULE_COMMENT_RICH_TEXT_END;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
                match("\uFFFD\uFFFD");
                // loop28: consume everything up to the end of the line
                loop28:
                do {
                    int alt28 = 2;
                    int LA28_0 = input.LA(1);
                    if (((LA28_0 >= '\u0000' && LA28_0 <= '\t') || (LA28_0 >= '\u000B' && LA28_0 <= '\f') || (LA28_0 >= '\u000E' && LA28_0 <= '\uFFFF'))) {
                        alt28 = 1;
                    }
                    switch (alt28) {
                        case 1:
                            // any character except '\n' or '\r'
                            {
                                if ((input.LA(1) >= '\u0000' && input.LA(1) <= '\t') || (input.LA(1) >= '\u000B' && input.LA(1) <= '\f') || (input.LA(1) >= '\u000E' && input.LA(1) <= '\uFFFF')) {
                                    input.consume();
                                } else {
                                    MismatchedSetException mse = new MismatchedSetException(null, input);
                                    recover(mse);
                                    throw mse;
                                }
                            }
                            break;
                        default:
                            break loop28;
                    }
                } while (true);
                // alt34: either an (optional '\r') '\n' followed by rich-string
                // content, or EOF directly
                int alt34 = 2;
                int LA34_0 = input.LA(1);
                if ((LA34_0 == '\n' || LA34_0 == '\r')) {
                    alt34 = 1;
                } else {
                    alt34 = 2;
                }
                switch (alt34) {
                    case 1:
                        {
                            // optional carriage return before the newline
                            int alt29 = 2;
                            int LA29_0 = input.LA(1);
                            if ((LA29_0 == '\r')) {
                                alt29 = 1;
                            }
                            switch (alt29) {
                                case 1:
                                    {
                                        match('\r');
                                    }
                                    break;
                            }
                            match('\n');
                            // loop30: zero or more RULE_IN_RICH_STRING fragments
                            loop30:
                            do {
                                int alt30 = 2;
                                int LA30_0 = input.LA(1);
                                if ((LA30_0 == '\'')) {
                                    int LA30_1 = input.LA(2);
                                    if ((LA30_1 == '\'')) {
                                        int LA30_4 = input.LA(3);
                                        if (((LA30_4 >= '\u0000' && LA30_4 <= '&') || (LA30_4 >= '(' && LA30_4 <= '\uFFFC') || (LA30_4 >= '\uFFFE' && LA30_4 <= '\uFFFF'))) {
                                            alt30 = 1;
                                        }
                                    } else if (((LA30_1 >= '\u0000' && LA30_1 <= '&') || (LA30_1 >= '(' && LA30_1 <= '\uFFFC') || (LA30_1 >= '\uFFFE' && LA30_1 <= '\uFFFF'))) {
                                        alt30 = 1;
                                    }
                                } else if (((LA30_0 >= '\u0000' && LA30_0 <= '&') || (LA30_0 >= '(' && LA30_0 <= '\uFFFC') || (LA30_0 >= '\uFFFE' && LA30_0 <= '\uFFFF'))) {
                                    alt30 = 1;
                                }
                                switch (alt30) {
                                    case 1:
                                        {
                                            mRULE_IN_RICH_STRING();
                                        }
                                        break;
                                    default:
                                        break loop30;
                                }
                            } while (true);
                            // alt33: closing "'''", or up to two quotes followed by EOF
                            int alt33 = 2;
                            int LA33_0 = input.LA(1);
                            if ((LA33_0 == '\'')) {
                                int LA33_1 = input.LA(2);
                                if ((LA33_1 == '\'')) {
                                    int LA33_3 = input.LA(3);
                                    if ((LA33_3 == '\'')) {
                                        alt33 = 1;
                                    } else {
                                        alt33 = 2;
                                    }
                                } else {
                                    alt33 = 2;
                                }
                            } else {
                                alt33 = 2;
                            }
                            switch (alt33) {
                                case 1:
                                    {
                                        match("'''");
                                    }
                                    break;
                                case 2:
                                    {
                                        // optional "'" or "''" before EOF
                                        int alt32 = 2;
                                        int LA32_0 = input.LA(1);
                                        if ((LA32_0 == '\'')) {
                                            alt32 = 1;
                                        }
                                        switch (alt32) {
                                            case 1:
                                                {
                                                    match('\'');
                                                    int alt31 = 2;
                                                    int LA31_0 = input.LA(1);
                                                    if ((LA31_0 == '\'')) {
                                                        alt31 = 1;
                                                    }
                                                    switch (alt31) {
                                                        case 1:
                                                            {
                                                                match('\'');
                                                            }
                                                            break;
                                                    }
                                                }
                                                break;
                                        }
                                        match(EOF);
                                    }
                                    break;
                            }
                        }
                        break;
                    case 2:
                        {
                            match(EOF);
                        }
                        break;
                }
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
        }
    }
}
public class SID {
    /**
     * Serializes to byte array: revision, sub-authority count, identifier
     * authority, then each sub-authority.
     *
     * @return serialized SID
     */
    public byte[] toByteArray() {
        // variable content size depending on sub authorities number
        final ByteBuffer buff = ByteBuffer.allocate(getSize());
        buff.put(revision);
        // Only the lowest byte of the count is written.
        // NOTE(review): assumes fewer than 256 sub-authorities — confirm upstream limit.
        buff.put(NumberFacility.getBytes(subAuthorities.size())[3]);
        buff.put(identifierAuthority);
        for (byte[] sub : subAuthorities) {
            // NOTE(review): Hex.reverse presumably flips each sub-authority to
            // little-endian byte order per the SID wire format — verify.
            buff.put(Hex.reverse(sub));
        }
        return buff.array();
    }
}
public class WikibaseDataFetcher { /** * Sets the value for the API ' s " languages " parameter based on the current * settings . * @ param properties * current setting of parameters */ private void setRequestLanguages ( WbGetEntitiesActionData properties ) { } }
if ( this . filter . excludeAllLanguages ( ) || this . filter . getLanguageFilter ( ) == null ) { return ; } properties . languages = ApiConnection . implodeObjects ( this . filter . getLanguageFilter ( ) ) ;
public class Search { /** * Obtain the query factory for building DSL based Ickle queries . */ public static QueryFactory getQueryFactory ( Cache < ? , ? > cache ) { } }
if ( cache == null || cache . getAdvancedCache ( ) == null ) { throw new IllegalArgumentException ( "cache parameter shall not be null" ) ; } AdvancedCache < ? , ? > advancedCache = cache . getAdvancedCache ( ) ; ensureAccessPermissions ( advancedCache ) ; EmbeddedQueryEngine queryEngine = SecurityActions . getCacheComponentRegistry ( advancedCache ) . getComponent ( EmbeddedQueryEngine . class ) ; if ( queryEngine == null ) { throw log . queryModuleNotInitialised ( ) ; } return new EmbeddedQueryFactory ( queryEngine ) ;
public class Logger {
    /**
     * Issue a formatted log message with a level of DEBUG.
     *
     * @param t the throwable
     * @param format the format string, as per {@link String#format(String, Object...)}
     * @param params the parameters
     */
    public void debugf(Throwable t, String format, Object... params) {
        // Delegate to the core formatter, tagging the call site with FQCN.
        doLogf(Level.DEBUG, FQCN, format, params, t);
    }
}
public class Calendar { /** * Validate a single field of this calendar given its minimum and * maximum allowed value . If the field is out of range , throw a * descriptive < code > IllegalArgumentException < / code > . Subclasses may * use this method in their implementation of { @ link * # validateField ( int ) } . */ protected final void validateField ( int field , int min , int max ) { } }
int value = fields [ field ] ; if ( value < min || value > max ) { throw new IllegalArgumentException ( fieldName ( field ) + '=' + value + ", valid range=" + min + ".." + max ) ; }
public class SVGParser { /** * [ [ < ' font - style ' > | | < ' font - variant ' > | | < ' font - weight ' > ] ? < ' font - size ' > [ / < ' line - height ' > ] ? < ' font - family ' > ] | caption | icon | menu | message - box | small - caption | status - bar | inherit */ private static void parseFont ( Style style , String val ) { } }
Integer fontWeight = null ; Style . FontStyle fontStyle = null ; String fontVariant = null ; // Start by checking for the fixed size standard system font names ( which we don ' t support ) if ( ! "|caption|icon|menu|message-box|small-caption|status-bar|" . contains ( '|' + val + '|' ) ) return ; // First part : style / variant / weight ( opt - one or more ) TextScanner scan = new TextScanner ( val ) ; String item ; while ( true ) { item = scan . nextToken ( '/' ) ; scan . skipWhitespace ( ) ; if ( item == null ) return ; if ( fontWeight != null && fontStyle != null ) break ; if ( item . equals ( "normal" ) ) // indeterminate which of these this refers to continue ; if ( fontWeight == null ) { fontWeight = FontWeightKeywords . get ( item ) ; if ( fontWeight != null ) continue ; } if ( fontStyle == null ) { fontStyle = parseFontStyle ( item ) ; if ( fontStyle != null ) continue ; } // Must be a font - variant keyword ? if ( fontVariant == null && item . equals ( "small-caps" ) ) { fontVariant = item ; continue ; } // Not any of these . Break and try next section break ; } // Second part : font size ( reqd ) and line - height ( opt ) Length fontSize = parseFontSize ( item ) ; // Check for line - height ( which we don ' t support ) if ( scan . consume ( '/' ) ) { scan . skipWhitespace ( ) ; item = scan . nextToken ( ) ; if ( item != null ) { try { parseLength ( item ) ; } catch ( SVGParseException e ) { return ; } } scan . skipWhitespace ( ) ; } // Third part : font family style . fontFamily = parseFontFamily ( scan . restOfText ( ) ) ; style . fontSize = fontSize ; style . fontWeight = ( fontWeight == null ) ? Style . FONT_WEIGHT_NORMAL : fontWeight ; style . fontStyle = ( fontStyle == null ) ? Style . FontStyle . Normal : fontStyle ; style . specifiedFlags |= ( SVG . SPECIFIED_FONT_FAMILY | SVG . SPECIFIED_FONT_SIZE | SVG . SPECIFIED_FONT_WEIGHT | SVG . SPECIFIED_FONT_STYLE ) ;
public class SvdlibcDenseTextFileTransformer { /** * { @ inheritDoc } */ public File transform ( File inputFile , File outFile , GlobalTransform transform ) { } }
try { BufferedReader br = new BufferedReader ( new FileReader ( inputFile ) ) ; PrintWriter writer = new PrintWriter ( new BufferedWriter ( new FileWriter ( outFile ) ) ) ; // Read in the header for the matrix . String line = br . readLine ( ) ; String [ ] rowCol = line . split ( "\\s+" ) ; int numRows = Integer . parseInt ( rowCol [ 0 ] ) ; int numCols = Integer . parseInt ( rowCol [ 1 ] ) ; // Write out the header for the matrix . writer . printf ( "%d %d\n" , numRows , numCols ) ; // Traverse each row . for ( int row = 0 ; ( line = br . readLine ( ) ) != null && row < numRows ; ++ row ) { // Traverse each entry in the matrix and transform the value for // the new matrix . String [ ] values = line . split ( "\\s+" ) ; StringBuilder sb = new StringBuilder ( values . length * 4 ) ; for ( int col = 0 ; col < numCols ; ++ col ) { double value = Double . parseDouble ( values [ col ] ) ; sb . append ( transform . transform ( row , col , value ) ) . append ( " " ) ; } writer . println ( sb . toString ( ) ) ; } writer . close ( ) ; } catch ( IOException ioe ) { throw new IOError ( ioe ) ; } return outFile ;
public class PHS398ChecklistV1_3Generator { /** * This method returns PHS398ChecklistDocument object based on proposal * development document which contains the PHS398ChecklistDocument * informations * ApplicationType , FederalID , ChangeOfPDPI , FormerPDName , ChangeOfInstitution , * FormerInstitutionName , InventionsAndPatents , ProgramIncome and * CertificationExplanation for a particular proposal */ private PHS398Checklist13Document getPHS398Checklist ( ) { } }
PHS398Checklist13Document phsChecklistDocument = PHS398Checklist13Document . Factory . newInstance ( ) ; PHS398Checklist13 phsChecklist = PHS398Checklist13 . Factory . newInstance ( ) ; answerHeaders = getPropDevQuestionAnswerService ( ) . getQuestionnaireAnswerHeaders ( pdDoc . getDevelopmentProposal ( ) . getProposalNumber ( ) ) ; setPhsCheckListBasicProperties ( phsChecklist ) ; setFormerPDNameAndIsChangeOfPDPI ( phsChecklist ) ; setFormerInstitutionNameAndChangeOfInstitution ( phsChecklist ) ; setIsInventionsAndPatentsAndIsPreviouslyReported ( phsChecklist ) ; ProposalDevelopmentBudgetExtContract budget = s2SCommonBudgetService . getBudget ( pdDoc . getDevelopmentProposal ( ) ) ; if ( budget != null ) { int numPeriods = budget . getBudgetPeriods ( ) . size ( ) ; setIncomeBudgetPeriods ( phsChecklist , budget . getBudgetProjectIncomes ( ) ) ; } else { phsChecklist . setProgramIncome ( YesNoDataType . N_NO ) ; } ynqAnswer = getYNQAnswer ( YNQANSWER_121 ) ; phsChecklist . setDisclosurePermission ( ynqAnswer ) ; phsChecklistDocument . setPHS398Checklist13 ( phsChecklist ) ; return phsChecklistDocument ;
public class HelpTopicNode { /** * Returns the first sibling node after this one . * @ return Next sibling node ( may be null ) . */ public HelpTopicNode getNextSibling ( ) { } }
int i = getIndex ( ) + 1 ; return i == 0 || i == parent . children . size ( ) ? null : parent . children . get ( i ) ;
public class BaseMoskitoUIAction { /** * Returns the currently selected unit either from request or session . * @ param req * @ param saveToSession - if true the request parameter will be saved into session . * @ return */ protected UnitBean getCurrentUnit ( HttpServletRequest req , boolean saveToSession ) { } }
String unitParameter = req . getParameter ( PARAM_UNIT ) ; if ( unitParameter == null ) { UnitBean ret = ( UnitBean ) req . getSession ( ) . getAttribute ( BEAN_UNIT ) ; if ( ret == null ) { ret = DEFAULT_UNIT_BEAN ; // ensure a unit bean is always in the session . req . getSession ( ) . setAttribute ( BEAN_UNIT , ret ) ; } return ret ; } int index = - 1 ; for ( int i = 0 ; i < AVAILABLE_UNITS_LIST . size ( ) ; i ++ ) { if ( AVAILABLE_UNITS_LIST . get ( i ) . getUnitName ( ) . equalsIgnoreCase ( unitParameter ) ) { index = i ; break ; } } UnitBean ret = index == - 1 ? DEFAULT_UNIT_BEAN : AVAILABLE_UNITS [ index ] ; if ( saveToSession ) req . getSession ( ) . setAttribute ( BEAN_UNIT , ret ) ; return ret ;
public class SnappyServer { /** * Define a REST endpoint mapped to HTTP GET with default path " / " as url * @ param endpoint The endpoint handler * @ param mediaTypes ( Optional ) The accepted and returned types for this endpoint */ public static void get ( HttpConsumer < HttpExchange > endpoint , MediaTypes ... mediaTypes ) { } }
addResource ( Methods . GET , HandlerUtil . BASE_PATH , endpoint , mediaTypes ) ;
public class StrBuilder { /** * Validates parameters defining a range of the builder . * @ param startIndex the start index , inclusive , must be valid * @ param endIndex the end index , exclusive , must be valid except * that if too large it is treated as end of string * @ return the new string * @ throws IndexOutOfBoundsException if the index is invalid */ protected int validateRange ( final int startIndex , int endIndex ) { } }
if ( startIndex < 0 ) { throw new StringIndexOutOfBoundsException ( startIndex ) ; } if ( endIndex > size ) { endIndex = size ; } if ( startIndex > endIndex ) { throw new StringIndexOutOfBoundsException ( "end < start" ) ; } return endIndex ;
public class nspbrs { /** * Use this API to clear nspbrs . */ public static base_response clear ( nitro_service client ) throws Exception { } }
nspbrs clearresource = new nspbrs ( ) ; return clearresource . perform_operation ( client , "clear" ) ;
public class cmppolicy { /** * Use this API to update cmppolicy . */ public static base_response update ( nitro_service client , cmppolicy resource ) throws Exception { } }
cmppolicy updateresource = new cmppolicy ( ) ; updateresource . name = resource . name ; updateresource . rule = resource . rule ; updateresource . resaction = resource . resaction ; return updateresource . update_resource ( client ) ;
public class TwoPhaseAnnotationProcessor { /** * Report a warning detected during the " check " phase . The presence of warnings * will not affect execution of the " generate " phase . */ public void printWarning ( Declaration d , String id , Object ... args ) { } }
addWarning ( d , id , args ) ;
public class HandlerChainInfoBuilder { /** * Build the handlerChain info from web . xml * @ param hChain * @ return */ public static HandlerChainInfo buildHandlerChainInfoFromXML ( HandlerChain hChain ) { } }
HandlerChainInfo hcInfo = new HandlerChainInfo ( ) ; // set Service QName if ( hChain . getServiceNamePattern ( ) != null ) { hcInfo . setServiceNamePattern ( new QName ( hChain . getServiceNamePattern ( ) . getNamespaceURI ( ) , hChain . getServiceNamePattern ( ) . getLocalPart ( ) ) ) ; } else { hcInfo . setServiceNamePattern ( new QName ( "*" ) ) ; } // set Port QName if ( hChain . getPortNamePattern ( ) != null ) { hcInfo . setPortNamePattern ( new QName ( hChain . getPortNamePattern ( ) . getNamespaceURI ( ) , hChain . getPortNamePattern ( ) . getLocalPart ( ) ) ) ; } else { hcInfo . setPortNamePattern ( new QName ( "*" ) ) ; } // add protocol bindings hcInfo . addProtocolBindings ( hChain . getProtocolBindings ( ) ) ; for ( com . ibm . ws . javaee . dd . common . wsclient . Handler handler : hChain . getHandlers ( ) ) { hcInfo . addHandlerInfo ( buildHandlerInfoFromXML ( handler ) ) ; } return hcInfo ;
public class SuperToast { /** * Sets the { @ link com . github . johnpersano . supertoasts . library . SuperToast . OnDismissListener } * of the SuperActivityToast . The listener will be triggered when the * SuperActivityToast is dismissed . * @ param tag A unique tag for this listener * @ param token A Parcelable token to hold data across orientation changes * @ param onDismissListener The desired OnDismissListener * @ return The current SuperActivityToast instance * @ see # setOnDismissListener ( String , * com . github . johnpersano . supertoasts . library . SuperToast . OnDismissListener ) */ protected SuperToast setOnDismissListener ( String tag , Parcelable token , @ NonNull OnDismissListener onDismissListener ) { } }
this . mOnDismissListener = onDismissListener ; this . mStyle . dismissTag = tag ; this . mStyle . dismissToken = token ; return this ;
public class EditorGridPanel { /** * set row value in active editor * @ param value value * @ return true or false */ public boolean setRowValue ( String value ) { } }
LOGGER . debug ( "setRowValue(" + value + ") - in active editor" ) ; TextField editor = getActiveEditor ( ) ; boolean edited = editor . setValue ( value ) ; if ( edited ) { editor . doBlur ( ) ; } return edited ;
public class UIMetricUtils {

    /**
     * Gets the metric of one specific task.
     *
     * @param taskStreamMetrics raw metric info
     * @param component component name
     * @param id task id
     * @param window window duration for metrics in seconds
     * @return the task metric
     */
    public static UITaskMetric getTaskMetric(List<MetricInfo> taskStreamMetrics,
            String component, int id, int window) {
        UITaskMetric taskMetric = new UITaskMetric(component, id);
        // NOTE(review): only element 0 is ever read, yet the guard requires
        // size() > 1, so a single-element list is skipped entirely — confirm
        // whether this should be !taskStreamMetrics.isEmpty().
        if (taskStreamMetrics.size() > 1) {
            MetricInfo info = taskStreamMetrics.get(0);
            if (info != null) {
                // Metric keys are "@"-separated compound names that embed the
                // task id; filter down to the requested task only.
                for (Map.Entry<String, Map<Integer, MetricSnapshot>> metric
                        : info.get_metrics().entrySet()) {
                    String name = metric.getKey();
                    String[] split_name = name.split("@");
                    int taskId = JStormUtils.parseInt(
                            UIMetricUtils.extractTaskId(split_name));
                    if (taskId != id) continue;  // only handle the specific task

                    String metricName = UIMetricUtils.extractMetricName(split_name);
                    // A dotted metric name carries its parent component as the
                    // prefix, e.g. "parent.metric".
                    String parentComp = null;
                    if (metricName != null && metricName.contains(".")) {
                        parentComp = metricName.split("\\.")[0];
                        metricName = metricName.split("\\.")[1];
                    }
                    // Pick the snapshot for the requested time window; may be
                    // null if that window is absent — presumably tolerated by
                    // setMetricValue (TODO confirm).
                    MetricSnapshot snapshot = metric.getValue().get(window);
                    taskMetric.setMetricValue(snapshot, parentComp, metricName);
                }
            }
        }
        taskMetric.mergeValue();
        return taskMetric;
    }
}
public class MPDDatabaseReader { /** * Populates a Map instance representing the IDs and names of * projects available in the current database . * @ return Map instance containing ID and name pairs * @ throws MPXJException */ public Map < Integer , String > listProjects ( ) throws MPXJException { } }
MPD9DatabaseReader reader = new MPD9DatabaseReader ( ) ; return reader . listProjects ( ) ;
public class AbstractBeanDefinition { /** * Configures the bean for the given { @ link BeanContext } . If the context features an * { @ link io . micronaut . context . env . Environment } this method configures the annotation metadata such that * environment aware values are returned . * @ param environment The environment */ @ Internal @ Override public final void configure ( Environment environment ) { } }
if ( environment != null ) { this . environment = environment ; if ( constructor instanceof EnvironmentConfigurable ) { ( ( EnvironmentConfigurable ) constructor ) . configure ( environment ) ; } for ( MethodInjectionPoint methodInjectionPoint : methodInjectionPoints ) { if ( methodInjectionPoint instanceof EnvironmentConfigurable ) { ( ( EnvironmentConfigurable ) methodInjectionPoint ) . configure ( environment ) ; } } for ( ExecutableMethod < T , ? > executableMethod : executableMethodMap . values ( ) ) { if ( executableMethod instanceof EnvironmentConfigurable ) { ( ( EnvironmentConfigurable ) executableMethod ) . configure ( environment ) ; } } }
public class HelpDesk { /** * Gets the group id . * @ return the group id */ public String getGroupId ( ) { } }
List < String > groups = USERS_GROUPS . get ( _userId ) ; return ( groups != null && groups . size ( ) > 0 ) ? groups . get ( 0 ) : null ;
public class TransformMatrixDictionary { /** * 获取转移频次 * @ param from * @ param to * @ return */ public int getFrequency ( E from , E to ) { } }
return matrix [ from . ordinal ( ) ] [ to . ordinal ( ) ] ;