signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ShipmentUrl { /** * Get Resource Url for GetShipment * @ param orderId Unique identifier of the order . * @ param responseFields Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object . This parameter should only be used to retrieve data . Attempting to update data using this parameter may cause data loss . * @ param shipmentId Unique identifier of the shipment to retrieve . * @ return String Resource Url */ public static MozuUrl getShipmentUrl ( String orderId , String responseFields , String shipmentId ) { } }
UrlFormatter formatter = new UrlFormatter ( "/api/commerce/orders/{orderId}/shipments/{shipmentId}?responseFields={responseFields}" ) ; formatter . formatUrl ( "orderId" , orderId ) ; formatter . formatUrl ( "responseFields" , responseFields ) ; formatter . formatUrl ( "shipmentId" , shipmentId ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class FSDirectory {
    /**
     * Create a FileStatus from a file INode.
     * Directories report zero length, replication and preferred block size;
     * file-specific values are read via the INodeFile cast.
     */
    static FileStatus createFileStatus(String path, INode node) {
        // length is zero for directories
        return new FileStatus(
                node.isDirectory() ? 0 : node.computeContentSummary().getLength(),
                node.isDirectory(),
                node.isDirectory() ? 0 : ((INodeFile) node).getReplication(),
                node.isDirectory() ? 0 : ((INodeFile) node).getPreferredBlockSize(),
                node.getModificationTime(),
                node.getAccessTime(),
                node.getFsPermission(),
                node.getUserName(),
                node.getGroupName(),
                new Path(path));
    }
}
public class PolynomialApproximation { /** * The object implements the readExternal method to restore its * contents by calling the methods of DataInput for primitive * types and readObject for objects , strings and arrays . The * readExternal method must read the values in the same sequence * and with the same types as were written by writeExternal . * @ param in the stream to read data from in order to restore the object */ @ Override public void readExternal ( ObjectInput in ) throws IOException { } }
b = new double [ in . readInt ( ) ] ; for ( int p = 0 ; p < b . length ; p ++ ) { b [ p ] = in . readDouble ( ) ; }
public class JdbcUtils { /** * Check if the specified table exists */ public static boolean doesTableExist ( JdbcOperations jdbcOperations , final String table ) { } }
final boolean tableExists = jdbcOperations . execute ( new ConnectionCallback < Boolean > ( ) { @ Override public Boolean doInConnection ( Connection con ) throws SQLException , DataAccessException { final DatabaseMetaData metaData = con . getMetaData ( ) ; final ResultSet tables = metaData . getTables ( null , null , null , new String [ ] { "TABLE" } ) ; while ( tables . next ( ) ) { final String dbTableName = tables . getString ( "TABLE_NAME" ) ; if ( table . equalsIgnoreCase ( dbTableName ) ) { return true ; } } return false ; } } ) ; return tableExists ;
public class PacketCapturesInner {
    /**
     * Gets a packet capture session by name.
     *
     * @param resourceGroupName the name of the resource group
     * @param networkWatcherName the name of the network watcher
     * @param packetCaptureName the name of the packet capture session
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by the server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PacketCaptureResultInner object if successful
     */
    public PacketCaptureResultInner get(String resourceGroupName, String networkWatcherName, String packetCaptureName) {
        // Delegate to the async variant and block for the single response body.
        return getWithServiceResponseAsync(resourceGroupName, networkWatcherName, packetCaptureName)
                .toBlocking().single().body();
    }
}
public class AbstractList { /** * Sets the size of the receiver . * If the new size is greater than the current size , new null or zero items are added to the end of the receiver . * If the new size is less than the current size , all components at index newSize and greater are discarded . * This method does not release any superfluos internal memory . Use method < tt > trimToSize < / tt > to release superfluos internal memory . * @ param newSize the new size of the receiver . * @ throws IndexOutOfBoundsException if < tt > newSize & lt ; 0 < / tt > . */ public void setSize ( int newSize ) { } }
if ( newSize < 0 ) throw new IndexOutOfBoundsException ( "newSize:" + newSize ) ; int currentSize = size ( ) ; if ( newSize != currentSize ) { if ( newSize > currentSize ) beforeInsertDummies ( currentSize , newSize - currentSize ) ; else if ( newSize < currentSize ) removeFromTo ( newSize , currentSize - 1 ) ; }
public class Ifc2x3tc1PackageImpl {
    /**
     * Lazily resolve and cache the EClass for IfcRelCoversBldgElements from the
     * registered Ifc2x3tc1 package.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getIfcRelCoversBldgElements() {
        if (ifcRelCoversBldgElementsEClass == null) {
            // Classifier index 467 in the generated package model.
            ifcRelCoversBldgElementsEClass = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(467);
        }
        return ifcRelCoversBldgElementsEClass;
    }
}
public class StackModel {
    /** {@inheritDoc} */
    @Override
    protected void initModel() {
        // Listen StackWaves to be aware of any changes:
        // page-model waves filtered to this stack's name...
        listen(new DefaultWaveChecker<>(StackWaves.STACK_NAME, getStackName()), StackWaves.SHOW_PAGE_MODEL);
        // ...and page-enum waves filtered to this stack's page enum class.
        listen(new ClassWaveChecker<>(StackWaves.PAGE_ENUM, getPageEnumClass()), StackWaves.SHOW_PAGE_ENUM);
    }
}
public class Chain { /** * Register a user or other member type with the chain . * @ param registrationRequest Registration information . * @ throws RegistrationException if the registration fails * @ return */ public Member register ( RegistrationRequest registrationRequest ) throws RegistrationException { } }
Member member = getMember ( registrationRequest . getEnrollmentID ( ) ) ; member . register ( registrationRequest ) ; return member ;
public class PluginMessageDescription { /** * Create a description for a MissingCondition object . * @ param condition the condition * @ return a description to be used on email templates */ public String missing ( MissingCondition condition ) { } }
String description ; if ( condition . getContext ( ) != null && condition . getContext ( ) . get ( CONTEXT_PROPERTY_DESCRIPTION ) != null ) { description = condition . getContext ( ) . get ( CONTEXT_PROPERTY_DESCRIPTION ) ; } else { description = condition . getDataId ( ) ; } description += " not reported for " + condition . getInterval ( ) + "ms" ; return description ;
public class ImageGalleryFragment { /** * region ImageGalleryAdapter . ImageThumbnailLoader Methods */ @ Override public void loadImageThumbnail ( ImageView iv , String imageUrl , int dimension ) { } }
imageThumbnailLoader . loadImageThumbnail ( iv , imageUrl , dimension ) ;
public class DatabaseCollection {
    /**
     * Instantiate a new Database object.<br/>
     * Warning - Remember to call the init(name) method on the returned database object.
     *
     * @param iDatabaseType the DB type
     * @return the database; a plain BaseDatabase when no specific class could be created
     */
    public BaseDatabase makeDatabase(int iDatabaseType) {
        BaseDatabase database = null;
        String strDbPrefix = this.getDatabasePrefix(iDatabaseType);
        if (strDbPrefix != null)
            if (strDbPrefix.indexOf('.') == -1)
                // Bare prefix -> expand to a fully-qualified class name under base.db.
                strDbPrefix = DBConstants.ROOT_PACKAGE + "base.db." + strDbPrefix.toLowerCase() + "." + strDbPrefix + "Database";
        // NOTE(review): when strDbPrefix is null this passes null to makeObjectFromClassName —
        // presumably that returns null and the default below kicks in; confirm.
        database = (BaseDatabase) ClassServiceUtility.getClassService().makeObjectFromClassName(strDbPrefix);
        if (database == null)
            database = new BaseDatabase(); // default
        return database;
    }
}
public class DoubleList { /** * Creates an returns an unmodifiable view of the given double array that requires * only a small object allocation . * @ param array the array to wrap into an unmodifiable list * @ param length the number of values of the array to use , starting from zero * @ return an unmodifiable list view of the array */ public static List < Double > unmodifiableView ( double [ ] array , int length ) { } }
return Collections . unmodifiableList ( view ( array , length ) ) ;
public class BuildContextGenerator {
    /**
     * Returns the predictive context used to determine how the constituent at the
     * specified index should be combined with other constituents.
     *
     * @param constituents the constituents which have yet to be combined into new constituents
     * @param index the index of the constituent which is being considered
     * @return the context for building constituents at the specified index
     */
    public String[] getContext(final Parse[] constituents, final int index) {
        final List<String> features = new ArrayList<String>(100);
        final int ps = constituents.length;

        // Window of constituents around the focus index:
        // cons(-2), cons(-1), cons(0), cons(1), cons(2)
        Parse p_2 = null;
        Parse p_1 = null;
        Parse p0 = null;
        Parse p1 = null;
        Parse p2 = null;
        // Punctuation sets adjacent to the window constituents.
        Collection<Parse> punct1s = null;
        Collection<Parse> punct2s = null;
        Collection<Parse> punct_1s = null;
        Collection<Parse> punct_2s = null;
        if (index - 2 >= 0) {
            p_2 = constituents[index - 2];
        }
        if (index - 1 >= 0) {
            p_1 = constituents[index - 1];
            punct_2s = p_1.getPreviousPunctuationSet();
        }
        p0 = constituents[index];
        punct_1s = p0.getPreviousPunctuationSet();
        punct1s = p0.getNextPunctuationSet();
        if (index + 1 < ps) {
            p1 = constituents[index + 1];
            punct2s = p1.getNextPunctuationSet();
        }
        if (index + 2 < ps) {
            p2 = constituents[index + 2];
        }

        // Dictionary-membership flags for head-word unigrams (u*), bigrams (b*)
        // and trigrams (t*); all default to true when no dictionary is set.
        // NOTE(review): some combinations below mix non-short-circuit '&' with '&&'
        // (e.g. "u_1 & u_2") — preserved as-is; behavior is identical for booleans
        // already computed, but confirm it is intentional before "fixing" it.
        boolean u_2 = true;
        boolean u_1 = true;
        boolean u0 = true;
        boolean u1 = true;
        boolean u2 = true;
        boolean b_2_1 = true;
        boolean b_10 = true;
        boolean b01 = true;
        boolean b12 = true;
        boolean t_2_10 = true;
        boolean t_101 = true;
        boolean t012 = true;
        if (this.dict != null) {
            if (p_2 != null) {
                this.unigram[0] = p_2.getHead().getCoveredText();
                u_2 = this.dict.contains(new StringList(this.unigram));
            }
            if (p2 != null) {
                this.unigram[0] = p2.getHead().getCoveredText();
                u2 = this.dict.contains(new StringList(this.unigram));
            }
            this.unigram[0] = p0.getHead().getCoveredText();
            u0 = this.dict.contains(new StringList(this.unigram));
            if (p_2 != null && p_1 != null) {
                this.bigram[0] = p_2.getHead().getCoveredText();
                this.bigram[1] = p_1.getHead().getCoveredText();
                b_2_1 = this.dict.contains(new StringList(this.bigram));
                this.trigram[0] = p_2.getHead().getCoveredText();
                this.trigram[1] = p_1.getHead().getCoveredText();
                this.trigram[2] = p0.getHead().getCoveredText();
                t_2_10 = this.dict.contains(new StringList(this.trigram));
            }
            if (p_1 != null && p1 != null) {
                this.trigram[0] = p_1.getHead().getCoveredText();
                this.trigram[1] = p0.getHead().getCoveredText();
                this.trigram[2] = p1.getHead().getCoveredText();
                t_101 = this.dict.contains(new StringList(this.trigram));
            }
            if (p_1 != null) {
                this.unigram[0] = p_1.getHead().getCoveredText();
                u_1 = this.dict.contains(new StringList(this.unigram));
                // extra check for 2 == null case
                b_2_1 = b_2_1 && u_1 & u_2;
                t_2_10 = t_2_10 && u_1 & u_2 & u0;
                t_101 = t_101 && u_1 & u0 && u1;
                this.bigram[0] = p_1.getHead().getCoveredText();
                this.bigram[1] = p0.getHead().getCoveredText();
                b_10 = this.dict.contains(new StringList(this.bigram)) && u_1 && u0;
            }
            if (p1 != null && p2 != null) {
                this.bigram[0] = p1.getHead().getCoveredText();
                this.bigram[1] = p2.getHead().getCoveredText();
                b12 = this.dict.contains(new StringList(this.bigram));
                this.trigram[0] = p0.getHead().getCoveredText();
                this.trigram[1] = p1.getHead().getCoveredText();
                this.trigram[2] = p2.getHead().getCoveredText();
                t012 = this.dict.contains(new StringList(this.trigram));
            }
            if (p1 != null) {
                this.unigram[0] = p1.getHead().getCoveredText();
                u1 = this.dict.contains(new StringList(this.unigram));
                // extra check for 2 == null case
                b12 = b12 && u1 && u2;
                t012 = t012 && u1 && u2 && u0;
                t_101 = t_101 && u0 && u_1 && u1;
                this.bigram[0] = p0.getHead().getCoveredText();
                this.bigram[1] = p1.getHead().getCoveredText();
                b01 = this.dict.contains(new StringList(this.bigram));
                b01 = b01 && u0 && u1;
            }
        }

        // String forms and back-off forms of each window constituent.
        final String consp_2 = cons(p_2, -2);
        final String consp_1 = cons(p_1, -1);
        final String consp0 = cons(p0, 0);
        final String consp1 = cons(p1, 1);
        final String consp2 = cons(p2, 2);
        final String consbop_2 = consbo(p_2, -2);
        final String consbop_1 = consbo(p_1, -1);
        final String consbop0 = consbo(p0, 0);
        final String consbop1 = consbo(p1, 1);
        final String consbop2 = consbo(p2, 2);
        final Cons c_2 = new Cons(consp_2, consbop_2, -2, u_2);
        final Cons c_1 = new Cons(consp_1, consbop_1, -1, u_1);
        final Cons c0 = new Cons(consp0, consbop0, 0, u0);
        final Cons c1 = new Cons(consp1, consbop1, 1, u1);
        final Cons c2 = new Cons(consp2, consbop2, 2, u2);

        // default
        features.add("default");
        // first constituent label
        // features.add("fl=" + constituents[0].getLabel());
        // features.add("stage=cons(i)");
        // cons(-2), cons(-1), cons(0), cons(1), cons(2):
        // add the full form only when the head word is in the dictionary,
        // always add the back-off form.
        if (u0) {
            features.add(consp0);
        }
        features.add(consbop0);
        if (u_2) {
            features.add(consp_2);
        }
        features.add(consbop_2);
        if (u_1) {
            features.add(consp_1);
        }
        features.add(consbop_1);
        if (u1) {
            features.add(consp1);
        }
        features.add(consbop1);
        if (u2) {
            features.add(consp2);
        }
        features.add(consbop2);

        // cons(0), cons(1)
        cons2(features, c0, c1, punct1s, b01);
        // cons(-1), cons(0)
        cons2(features, c_1, c0, punct_1s, b_10);
        // features.add("stage=cons(0),cons(1),cons(2)");
        cons3(features, c0, c1, c2, punct1s, punct2s, t012, b01, b12);
        cons3(features, c_2, c_1, c0, punct_2s, punct_1s, t_2_10, b_2_1, b_10);
        cons3(features, c_1, c0, c1, punct_1s, punct1s, t_101, b_10, b01);
        // features.add("stage=other");

        // Bracket/quote/comma matching features: scan left until the matching
        // opener or the start of the current chunk (label ends with START).
        final String p0Tag = p0.getType();
        if (p0Tag.equals("-RRB-")) {
            for (int pi = index - 1; pi >= 0; pi--) {
                final Parse p = constituents[pi];
                if (p.getType().equals("-LRB-")) {
                    features.add("bracketsmatch");
                    break;
                }
                if (p.getLabel().endsWith(BioCodec.START)) {
                    break;
                }
            }
        }
        if (p0Tag.equals("-RCB-")) {
            for (int pi = index - 1; pi >= 0; pi--) {
                final Parse p = constituents[pi];
                if (p.getType().equals("-LCB-")) {
                    features.add("bracketsmatch");
                    break;
                }
                if (p.getLabel().endsWith(BioCodec.START)) {
                    break;
                }
            }
        }
        if (p0Tag.equals("''")) {
            for (int pi = index - 1; pi >= 0; pi--) {
                final Parse p = constituents[pi];
                if (p.getType().equals("``")) {
                    features.add("quotesmatch");
                    break;
                }
                if (p.getLabel().endsWith(BioCodec.START)) {
                    break;
                }
            }
        }
        if (p0Tag.equals("'")) {
            for (int pi = index - 1; pi >= 0; pi--) {
                final Parse p = constituents[pi];
                if (p.getType().equals("`")) {
                    features.add("quotesmatch");
                    break;
                }
                if (p.getLabel().endsWith(BioCodec.START)) {
                    break;
                }
            }
        }
        if (p0Tag.equals(",")) {
            for (int pi = index - 1; pi >= 0; pi--) {
                final Parse p = constituents[pi];
                if (p.getType().equals(",")) {
                    features.add("iscomma");
                    break;
                }
                if (p.getLabel().endsWith(BioCodec.START)) {
                    break;
                }
            }
        }
        // Sentence-final period: fire only when the chunk started at index 0.
        if (p0Tag.equals(".") && index == ps - 1) {
            for (int pi = index - 1; pi >= 0; pi--) {
                final Parse p = constituents[pi];
                if (p.getLabel().endsWith(BioCodec.START)) {
                    if (pi == 0) {
                        features.add("endofsentence");
                    }
                    break;
                }
            }
        }
        return features.toArray(new String[features.size()]);
    }
}
public class CommercePriceListLocalServiceUtil {
    /**
     * Adds the commerce price list to the database. Also notifies the appropriate model listeners.
     *
     * @param commercePriceList the commerce price list
     * @return the commerce price list that was added
     */
    public static com.liferay.commerce.price.list.model.CommercePriceList addCommercePriceList(
            com.liferay.commerce.price.list.model.CommercePriceList commercePriceList) {
        // Delegate to the backing local service.
        return getService().addCommercePriceList(commercePriceList);
    }
}
public class StandardCache { /** * Removes a collection of objects from cache . * @ param keys object keys */ public void clear ( Collection keys ) { } }
synchronized ( mirror ) { synchronized ( data ) { Iterator i = keys . iterator ( ) ; while ( i . hasNext ( ) ) { Object key = i . next ( ) ; data . remove ( key ) ; mirror . remove ( key ) ; } } }
public class FlowGraphPath {
    /**
     * A method to modify the {@link ConfigurationKeys#JOB_DEPENDENCIES} specified in a {@link JobTemplate} to those
     * which are usable in a {@link JobSpec}.
     * The {@link ConfigurationKeys#JOB_DEPENDENCIES} specified in a JobTemplate use the JobTemplate names
     * (i.e. the file names of the templates without the extension). However, the same {@link FlowTemplate} may be used
     * across multiple {@link FlowEdge}s. To ensure that we capture dependencies between jobs correctly as Dags from
     * successive hops are merged, we translate the {@link JobTemplate} name specified in the dependencies config to
     * {@link ConfigurationKeys#JOB_NAME_KEY} from the corresponding {@link JobSpec}, which is guaranteed to be
     * globally unique. For example, a template job1.job with "job.dependencies=job2,job3" gets its dependencies
     * rewritten to the compiled job.name values of job2 and job3.
     *
     * @param jobExecutionPlans a list of {@link JobExecutionPlan}s
     * @param templateToJobNameMap a HashMap that has the mapping from the {@link JobTemplate} names to job.name in
     *        the corresponding {@link JobSpec}
     */
    private void updateJobDependencies(List<JobExecutionPlan> jobExecutionPlans, Map<String, String> templateToJobNameMap) {
        for (JobExecutionPlan jobExecutionPlan : jobExecutionPlans) {
            JobSpec jobSpec = jobExecutionPlan.getJobSpec();
            List<String> updatedDependenciesList = new ArrayList<>();
            if (jobSpec.getConfig().hasPath(ConfigurationKeys.JOB_DEPENDENCIES)) {
                for (String dependency : ConfigUtils.getStringList(jobSpec.getConfig(), ConfigurationKeys.JOB_DEPENDENCIES)) {
                    if (!templateToJobNameMap.containsKey(dependency)) {
                        // We should never hit this condition. The logic here is a safety check.
                        throw new RuntimeException("TemplateToJobNameMap does not contain dependency " + dependency);
                    }
                    updatedDependenciesList.add(templateToJobNameMap.get(dependency));
                }
                // Replace the template-name dependencies with globally unique job names.
                String updatedDependencies = Joiner.on(",").join(updatedDependenciesList);
                jobSpec.setConfig(jobSpec.getConfig().withValue(ConfigurationKeys.JOB_DEPENDENCIES,
                        ConfigValueFactory.fromAnyRef(updatedDependencies)));
            }
        }
    }
}
public class ConnectionDescriptorXmlHandler {
    /**
     * endElement callback. Most elements are built up from here: each closing tag
     * resets or restores the current attribute container so subsequent attribute
     * elements attach to the correct descriptor level.
     */
    public void endElement(String uri, String name, String qName) {
        boolean isDebug = logger.isDebugEnabled();
        try {
            switch (getLiteralId(qName)) {
                case MAPPING_REPOSITORY: {
                    currentAttributeContainer = null;
                    break;
                }
                case CLASS_DESCRIPTOR: {
                    currentAttributeContainer = null;
                    break;
                }
                case JDBC_CONNECTION_DESCRIPTOR: {
                    logger.debug(" < " + tags.getTagById(JDBC_CONNECTION_DESCRIPTOR));
                    // Descriptor is complete; clear parsing state.
                    m_CurrentJCD = null;
                    currentAttributeContainer = null;
                    break;
                }
                case CONNECTION_POOL: {
                    logger.debug(" < " + tags.getTagById(CONNECTION_POOL));
                    // Back to the enclosing connection descriptor level.
                    currentAttributeContainer = m_CurrentJCD;
                    break;
                }
                case SEQUENCE_MANAGER: {
                    if (isDebug) logger.debug(" < " + tags.getTagById(SEQUENCE_MANAGER));
                    // set to null at the end of the tag!!
                    this.currentSequenceDescriptor = null;
                    currentAttributeContainer = m_CurrentJCD;
                    break;
                }
                case OBJECT_CACHE: {
                    if (currentAttributeContainer != null) {
                        if (isDebug) logger.debug(" < " + tags.getTagById(OBJECT_CACHE));
                        // set to null or previous element level at the end of the tag!!
                        currentAttributeContainer = m_CurrentJCD;
                    }
                    break;
                }
                case ATTRIBUTE: {
                    if (currentAttributeContainer != null) {
                        if (isDebug) logger.debug(" < " + tags.getTagById(ATTRIBUTE));
                    }
                    break;
                }
                default: {
                    // noop
                }
            }
        } catch (Exception ex) {
            logger.error(ex);
            throw new PersistenceBrokerException(ex);
        }
    }
}
public class DatasetKeyOutputFormat {
    /**
     * Load the job's temporary dataset, or create it if absent.
     * The job dataset may already exist if the ApplicationMaster was restarted;
     * it is reused only when its descriptor is still compatible with the target
     * dataset's descriptor, otherwise a new one is created.
     */
    @SuppressWarnings("unchecked")
    private static <E> Dataset<E> loadOrCreateJobDataset(JobContext jobContext) {
        Dataset<Object> dataset = load(jobContext).getDataset();
        String jobDatasetName = getJobDatasetName(jobContext);
        DatasetRepository repo = getDatasetRepository(jobContext);
        if (repo.exists(TEMP_NAMESPACE, jobDatasetName)) {
            Dataset<E> tempDataset = repo.load(TEMP_NAMESPACE, jobDatasetName,
                    DatasetKeyOutputFormat.<E>getType(jobContext));
            try {
                Compatibility.checkCompatible(dataset.getDescriptor(), tempDataset.getDescriptor());
                return tempDataset;
            } catch (RuntimeException ex) {
                // swallow: an incompatible leftover dataset is simply replaced below
            }
        }
        return repo.create(TEMP_NAMESPACE, jobDatasetName, copy(dataset.getDescriptor()),
                DatasetKeyOutputFormat.<E>getType(jobContext));
    }
}
public class TargetMetadataDetailsLayout { /** * Populate target metadata . * @ param target */ public void populateMetadata ( final Target target ) { } }
removeAllItems ( ) ; if ( target == null ) { return ; } selectedTargetId = target . getId ( ) ; final List < TargetMetadata > targetMetadataList = targetManagement . findMetaDataByControllerId ( PageRequest . of ( 0 , MAX_METADATA_QUERY ) , target . getControllerId ( ) ) . getContent ( ) ; if ( targetMetadataList != null && ! targetMetadataList . isEmpty ( ) ) { targetMetadataList . forEach ( this :: setMetadataProperties ) ; }
public class AmazonIdentityManagementAsyncClient {
    /**
     * Simplified method form for invoking the ListSAMLProviders operation with an AsyncHandler.
     *
     * @see #listSAMLProvidersAsync(ListSAMLProvidersRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<ListSAMLProvidersResult> listSAMLProvidersAsync(
            com.amazonaws.handlers.AsyncHandler<ListSAMLProvidersRequest, ListSAMLProvidersResult> asyncHandler) {
        // Delegate to the request-taking overload with a default (empty) request.
        return listSAMLProvidersAsync(new ListSAMLProvidersRequest(), asyncHandler);
    }
}
public class SearchStatusMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(SearchStatus searchStatus, ProtocolMarshaller protocolMarshaller) {
        if (searchStatus == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each member under its protocol binding.
            protocolMarshaller.marshall(searchStatus.getTimems(), TIMEMS_BINDING);
            protocolMarshaller.marshall(searchStatus.getRid(), RID_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ClusterMapActivity {
    /**
     * onOptionsItemSelected handler.
     * Since clustering needs the MapView to be created and visible,
     * this sample does the clustering here.
     */
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        super.onOptionsItemSelected(item);
        switch (item.getItemId()) {
            case 1234:
                // Start clustering (no-op if already running).
                if (clusterer != null) {
                    break;
                }
                // create clusterer instance
                clusterer = new ClusterManager(mapView, getMarkerBitmap(), getZoomLevelMax(), false);
                // Create a Toast, see e.g. http://www.mkyong.com/android/android-toast-example/
                Toast toast = Toast.makeText(this, "", Toast.LENGTH_LONG);
                ClusterManager.setToast(toast);
                // this uses the framebuffer position, the mapview position can be out of sync with
                // what the user sees on the screen if an animation is in progress
                this.mapView.getModel().frameBufferModel.addObserver(clusterer);
                // add geoitems for clustering
                for (int i = 0; i < geoItems.length; i++) {
                    clusterer.addItem(geoItems[i]);
                }
                // now redraw the cluster. it will create markers.
                clusterer.redraw();
                displayItems.setEnabled(false);
                displayMoreItems.setEnabled(true);
                hideItems.setEnabled(true);
                // now you can see items clustered on the map.
                // zoom in/out to see how icons change.
                break;
            case 5678:
                // Load more items asynchronously; the handler adds markers when done.
                setProgressBarIndeterminateVisibility(true);
                Handler myHandler = new Handler() {
                    @Override
                    public void handleMessage(Message msg) {
                        switch (msg.what) {
                            case 0:
                                // notification-style callback: add the markers now
                                addMarker();
                                break;
                            default:
                                break;
                        }
                    }
                };
                new ManyDummyContent(myHandler);
                item.setEnabled(false);
                break;
            case 9012:
                // Tear down clustering and restore menu state.
                if (clusterer != null) {
                    clusterer.destroyGeoClusterer();
                    this.mapView.getModel().frameBufferModel.removeObserver(clusterer);
                    clusterer = null;
                }
                displayItems.setEnabled(true);
                displayMoreItems.setEnabled(false);
                hideItems.setEnabled(false);
                break;
        }
        return true;
    }
}
public class CheckMysql { /** * Execute and gather metrics . * @ param cl * - The command line parameters * @ throws MetricGatheringException * - If any error occurs during metric gathering process * @ return the gathered metrics */ @ Override public final Collection < Metric > gatherMetrics ( final ICommandLine cl ) throws MetricGatheringException { } }
List < Metric > metrics = new ArrayList < Metric > ( ) ; Mysql mysql = new Mysql ( cl ) ; long start = System . currentTimeMillis ( ) ; long elapsed = 0L ; Connection conn = null ; try { conn = mysql . getConnection ( ) ; elapsed = ( System . currentTimeMillis ( ) - start ) / 1000L ; } catch ( ClassNotFoundException e ) { LOG . error ( getContext ( ) , "Mysql driver library not found into the classpath: " + "download and put it in the same directory " + "of this plugin" ) ; throw new MetricGatheringException ( "Error accessing the MySQL server " + "- JDBC driver not installed" , Status . CRITICAL , e ) ; } catch ( Exception e ) { LOG . error ( getContext ( ) , "Error accessing the MySQL server" , e ) ; throw new MetricGatheringException ( "Error accessing the MySQL server - " + e . getMessage ( ) , Status . CRITICAL , e ) ; } if ( cl . hasOption ( "check-slave" ) ) { metrics . add ( checkSlave ( cl , mysql , conn ) ) ; } else { metrics . add ( new Metric ( "time" , "Connection took " + elapsed + " secs. " , new BigDecimal ( elapsed ) , new BigDecimal ( 0 ) , null ) ) ; } mysql . closeConnection ( conn ) ; return metrics ;
public class SnapshotHistory {
    /**
     * (non-Javadoc)
     * @see java.util.Comparator#compare(java.lang.Object, java.lang.Object)
     */
    @Override
    public int compare(SnapshotHistory o1, SnapshotHistory o2) {
        // Order snapshots by the natural ordering of their history field.
        return o1.history.compareTo(o2.history);
    }
}
public class PropertiesView { /** * { @ inheritDoc } */ @ Override protected void initView ( ) { } }
node ( ) . setMinWidth ( 200 ) ; final HBox hbox = HBoxBuilder . create ( ) . build ( ) ; final Label label = new Label ( "Node Name :" ) ; this . nodeName = new Text ( ) ; hbox . getChildren ( ) . addAll ( label , this . nodeName ) ; node ( ) . getChildren ( ) . add ( hbox ) ;
public class BackendCleanup {
    /**
     * Reset data in order to be in the same state as a fresh installation (but without having
     * to drop the db and restart the server).
     * Please be careful when updating this method as it's called by Orchestrator.
     */
    public void resetData() {
        try (DbSession dbSession = dbClient.openSession(false);
                Connection connection = dbSession.getConnection()) {
            truncateAnalysisTables(connection);
            deleteManualRules(connection);
            truncateInternalProperties(null, null, connection);
            truncateUsers(null, null, connection);
            truncateOrganizations(null, null, connection);
        } catch (SQLException e) {
            throw new IllegalStateException("Fail to reset data", e);
        }
        // Clear the search indexes after the DB tables have been cleaned.
        clearIndex(IssueIndexDefinition.DESCRIPTOR);
        clearIndex(ViewIndexDefinition.DESCRIPTOR);
        clearIndex(ProjectMeasuresIndexDefinition.DESCRIPTOR);
        clearIndex(ComponentIndexDefinition.DESCRIPTOR);
    }
}
public class CoreCondo {
    /**
     * Defer the given action.
     *
     * @param metadata metadata associated with the action
     * @param action action to defer
     * @param future future that will be bound to the action
     * @param <T> return type of the action
     * @return a deferred action container
     */
    private <T> DeferredAction<M> deferAction(final M metadata,
            final Supplier<? extends CompletionStage<T>> action,
            final CompletableFuture<T> future) {
        return new DeferredAction<>(metadata, () -> {
            final CompletionStage<? extends T> resultFuture;
            try {
                resultFuture = action.get();
            } catch (final Exception e) {
                // Synchronous failure: fail the bound future and mark processed immediately.
                future.completeExceptionally(e);
                markProcessed(metadata);
                return;
            }
            // Asynchronous completion: propagate the result or error on the executor,
            // then mark the action as processed in either case.
            resultFuture.handleAsync((result, e) -> {
                if (e != null) {
                    future.completeExceptionally(e);
                } else {
                    future.complete(result);
                }
                markProcessed(metadata);
                return null;
            }, executor);
        });
    }
}
public class Configuration { /** * Get the configuration properties as argument list as expected by the Talend job . If the properties * were not yet set , wait the time specified by the { @ link # setTimeout ( long ) timeout property } and return * empty argument list if properties still not specified . If < code > timeout < = 0 < / code > the method * immediately returns . * @ return the argument list , never < code > null < / code > */ public String [ ] awaitArguments ( ) throws InterruptedException { } }
if ( configAvailable . await ( timeout , TimeUnit . MILLISECONDS ) ) { return argumentList . toArray ( new String [ argumentList . size ( ) ] ) ; } else { LOG . warning ( "ConfigAdmin did not pass any properties yet, returning an empty argument list." ) ; return EMPTY_ARGUMENTS ; }
public class CglibLazyInitializer { /** * ( non - Javadoc ) * @ see net . sf . cglib . proxy . InvocationHandler # invoke ( java . lang . Object , * java . lang . reflect . Method , java . lang . Object [ ] ) */ @ Override public Object invoke ( Object proxy , Method method , Object [ ] args ) throws Throwable { } }
if ( constructed ) { String methodName = method . getName ( ) ; int params = args . length ; if ( params == 0 ) { if ( isUninitialized ( ) && method . equals ( getIdentifierMethod ) ) { return getIdentifier ( ) ; } else if ( "getKunderaLazyInitializer" . equals ( methodName ) ) { return this ; } } Object target = getImplementation ( ) ; String [ ] strArr = entityName . split ( "#" ) ; String fieldName = strArr [ 1 ] ; if ( owner != null ) { EntityMetadata m = KunderaMetadataManager . getEntityMetadata ( persistenceDelegator . getKunderaMetadata ( ) , owner . getClass ( ) ) ; Relation r = m . getRelation ( fieldName ) ; if ( r != null ) { PropertyAccessorHelper . set ( owner , r . getProperty ( ) , target ) ; } if ( r . getBiDirectionalField ( ) != null && method . getReturnType ( ) . equals ( m . getEntityClazz ( ) ) ) { PropertyAccessorHelper . set ( target , r . getBiDirectionalField ( ) , owner ) ; } } try { final Object returnValue ; if ( method . isAccessible ( ) ) { if ( ! method . getDeclaringClass ( ) . isInstance ( target ) ) { throw new ClassCastException ( target . getClass ( ) . getName ( ) ) ; } returnValue = method . invoke ( target , args ) ; } else { if ( ! method . isAccessible ( ) ) { method . setAccessible ( true ) ; } returnValue = method . invoke ( target , args ) ; } return ( ( returnValue == target ) ? proxy : returnValue ) ; } catch ( InvocationTargetException ite ) { throw new LazyInitializationException ( ite ) ; } } else { // while constructor is running throw new LazyInitializationException ( "unexpected case hit, method=" + method . getName ( ) ) ; }
public class KeyValueHandler { /** * Releasing the content of requests that are to be cancelled . * @ param request the request to side effect on . */ @ Override protected void sideEffectRequestToCancel ( final BinaryRequest request ) { } }
super . sideEffectRequestToCancel ( request ) ; if ( request instanceof BinaryStoreRequest ) { ( ( BinaryStoreRequest ) request ) . content ( ) . release ( ) ; } else if ( request instanceof AppendRequest ) { ( ( AppendRequest ) request ) . content ( ) . release ( ) ; } else if ( request instanceof PrependRequest ) { ( ( PrependRequest ) request ) . content ( ) . release ( ) ; }
public class DeleteSlotTypeVersionRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * <p>Generated-style SDK marshaller: writes the request's name and version fields
     * through the protocol marshaller using their pre-built bindings.
     *
     * @throws SdkClientException if the request is {@code null} or marshalling fails
     */
    public void marshall(DeleteSlotTypeVersionRequest deleteSlotTypeVersionRequest, ProtocolMarshaller protocolMarshaller) {
        if (deleteSlotTypeVersionRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(deleteSlotTypeVersionRequest.getName(), NAME_BINDING);
            protocolMarshaller.marshall(deleteSlotTypeVersionRequest.getVersion(), VERSION_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ExtensionActiveScan {
    /**
     * Shows the active scan dialogue with the given target, if not already visible.
     *
     * <p>The dialog is created lazily on first use; its tab set is the standard labels
     * plus one tab per registered {@code CustomScanPanel}. If the dialog is already
     * showing it is only brought to the front.
     *
     * @param target the target, might be {@code null} (a {@code null} target keeps the
     *        previously selected target).
     * @since TODO add version.
     */
    public void showCustomScanDialog(Target target) {
        if (customScanDialog == null) {
            // Work out the tabs: standard labels first, then any custom panel labels.
            String[] tabs = CustomScanDialog.STD_TAB_LABELS;
            if (this.customScanPanels.size() > 0) {
                List<String> tabList = new ArrayList<String>();
                for (String str : CustomScanDialog.STD_TAB_LABELS) {
                    tabList.add(str);
                }
                for (CustomScanPanel csp : customScanPanels) {
                    tabList.add(csp.getLabel());
                }
                tabs = tabList.toArray(new String[tabList.size()]);
            }
            customScanDialog = new CustomScanDialog(this, tabs, this.customScanPanels, View.getSingleton().getMainFrame(), new Dimension(700, 500));
        }
        if (customScanDialog.isVisible()) {
            // Already showing: just bring it to the user's attention, do not re-init.
            customScanDialog.requestFocus();
            // Its behind you! Actually not needed no the window is alwaysOnTop, but keeping in case we change that ;)
            customScanDialog.toFront();
            return;
        }
        if (target != null) {
            customScanDialog.init(target);
        } else {
            // Keep the previously selected target
            customScanDialog.init(null);
        }
        customScanDialog.setVisible(true);
    }
}
public class AttributeType {
    /**
     * Returns for given parameter <i>_name</i> the instance of class
     * {@link AttributeType}.
     *
     * <p>Looks the name up in the Infinispan cache first; on a miss the type is loaded
     * from the database, which is expected to populate the cache as a side effect.
     *
     * @param _name name to search in the cache
     * @return instance of class {@link AttributeType}; may be {@code null} if the DB
     *         lookup did not populate the cache — TODO confirm getAttributeTypeFromDB
     *         always caches the loaded type
     * @see #CACHE
     * @throws CacheReloadException on error
     */
    public static AttributeType get(final String _name) throws CacheReloadException {
        final Cache<String, AttributeType> cache = InfinispanCache.get().<String, AttributeType>getCache(AttributeType.NAMECACHE);
        if (!cache.containsKey(_name)) {
            // Cache miss: load from the database by name; the result is read back from
            // the cache below rather than returned directly.
            AttributeType.getAttributeTypeFromDB(AttributeType.SQL_NAME, _name);
        }
        return cache.get(_name);
    }
}
public class SARLCodeMiningProvider {
    /**
     * Add an annotation when the field's type is implicit and inferred by the SARL compiler.
     *
     * @param resource the resource to parse.
     * @param acceptor the code mining acceptor.
     */
    private void createImplicitFieldType(XtextResource resource, IAcceptor<? super ICodeMining> acceptor) {
        // Delegate to the shared var/val handler for XtendField elements: the label is
        // the simple name of the JVM type the compiler inferred for the field.
        createImplicitVarValType(resource, acceptor, XtendField.class, it -> it.getType(), it -> {
            final JvmField inferredField = (JvmField) this.jvmModelAssocitions.getPrimaryJvmElement(it);
            // No mining label when the JVM model has no resolvable type for the field.
            if (inferredField == null || inferredField.getType() == null || inferredField.getType().eIsProxy()) {
                return null;
            }
            return inferredField.getType().getSimpleName();
        }, null,
        // Grammar anchor: position the mining after the initial-value assignment.
        () -> this.grammar.getAOPMemberAccess().getInitialValueAssignment_2_3_3_1());
    }
}
public class RegisteredServiceCouchDbRepository { /** * Update a record without revision checks . * @ param record record to be updated */ @ UpdateHandler ( name = "update_record" , file = "RegisteredServiceDocument_update.js" ) public void updateRecord ( final RegisteredServiceDocument record ) { } }
if ( record . getId ( ) == null ) { add ( record ) ; } else { db . callUpdateHandler ( stdDesignDocumentId , "update_record" , record . getId ( ) , CollectionUtils . wrap ( "doc" , record ) ) ; }
public class DeleteRepositoryRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * <p>Generated-style SDK marshaller: writes the repository name through the
     * protocol marshaller using its pre-built binding.
     *
     * @throws SdkClientException if the request is {@code null} or marshalling fails
     */
    public void marshall(DeleteRepositoryRequest deleteRepositoryRequest, ProtocolMarshaller protocolMarshaller) {
        if (deleteRepositoryRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(deleteRepositoryRequest.getRepositoryName(), REPOSITORYNAME_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CmsLogChannelTable { /** * Simple check if the logger has the global log file < p > or a single one . * @ param logchannel the channel that has do be checked * @ return true if the the log channel has a single log file */ protected boolean isloggingactivated ( Logger logchannel ) { } }
boolean check = false ; for ( Appender appender : logchannel . getAppenders ( ) . values ( ) ) { check = appender . getName ( ) . equals ( logchannel . getName ( ) ) ; } return check ;
public class FileUtils { /** * Get a directory for a given resource identifier . * @ param baseDirectory the base directory * @ param identifier a resource identifier * @ return a directory */ public static File getResourceDirectory ( final File baseDirectory , final IRI identifier ) { } }
requireNonNull ( baseDirectory , "The baseDirectory may not be null!" ) ; requireNonNull ( identifier , "The identifier may not be null!" ) ; final String id = identifier . getIRIString ( ) ; final StringJoiner joiner = new StringJoiner ( separator ) ; final CRC32 hasher = new CRC32 ( ) ; hasher . update ( id . getBytes ( UTF_8 ) ) ; final String intermediate = Long . toHexString ( hasher . getValue ( ) ) ; range ( 0 , intermediate . length ( ) / LENGTH ) . limit ( MAX ) . forEach ( i -> joiner . add ( intermediate . substring ( i * LENGTH , ( i + 1 ) * LENGTH ) ) ) ; joiner . add ( md5Hex ( id ) ) ; return new File ( baseDirectory , joiner . toString ( ) ) ;
public class Counters { /** * Returns a new Counter which is the input counter with log tf scaling * @ param c * The counter to scale . It is not changed * @ param base * The base of the logarithm used for tf scaling by 1 + log tf * @ return A new Counter which is the argument scaled by the given scale * factor . */ @ SuppressWarnings ( "unchecked" ) public static < E , C extends Counter < E > > C tfLogScale ( C c , double base ) { } }
C scaled = ( C ) c . getFactory ( ) . create ( ) ; for ( E key : c . keySet ( ) ) { double cnt = c . getCount ( key ) ; double scaledCnt = 0.0 ; if ( cnt > 0 ) { scaledCnt = 1.0 + SloppyMath . log ( cnt , base ) ; } scaled . setCount ( key , scaledCnt ) ; } return scaled ;
public class version_matrix_file { /** * < pre > * Performs generic data validation for the operation to be performed * < / pre > */ protected void validate ( String operationType ) throws Exception { } }
super . validate ( operationType ) ; MPSString file_name_validator = new MPSString ( ) ; file_name_validator . setConstraintIsReq ( MPSConstants . DELETE_CONSTRAINT , true ) ; file_name_validator . setConstraintMaxStrLen ( MPSConstants . GENERIC_CONSTRAINT , 256 ) ; file_name_validator . setConstraintMinStrLen ( MPSConstants . GENERIC_CONSTRAINT , 1 ) ; file_name_validator . setConstraintIsReq ( MPSConstants . MODIFY_CONSTRAINT , true ) ; file_name_validator . validate ( operationType , file_name , "\"file_name\"" ) ; MPSString file_last_modified_validator = new MPSString ( ) ; file_last_modified_validator . setConstraintMaxStrLen ( MPSConstants . GENERIC_CONSTRAINT , 128 ) ; file_last_modified_validator . setConstraintMinStrLen ( MPSConstants . GENERIC_CONSTRAINT , 1 ) ; file_last_modified_validator . validate ( operationType , file_last_modified , "\"file_last_modified\"" ) ; MPSString formation_name_validator = new MPSString ( ) ; formation_name_validator . validate ( operationType , formation_name , "\"formation_name\"" ) ; MPSString formation_version_validator = new MPSString ( ) ; formation_version_validator . validate ( operationType , formation_version , "\"formation_version\"" ) ;
public class WorkbookCreationHelper {
    /**
     * Write the current workbook to a writable resource.
     *
     * <p>Opens the resource's output stream in truncate mode and delegates to the
     * stream-based overload, which is responsible for closing the stream.
     *
     * @param aRes The resource to write to. May not be <code>null</code>.
     * @return {@link ESuccess}
     */
    @Nonnull
    public ESuccess writeTo(@Nonnull final IWritableResource aRes) {
        return writeTo(aRes.getOutputStream(EAppend.TRUNCATE));
    }
}
public class Cell { /** * Sets the maxWidth and maxHeight to the specified values . */ public Cell < C , T > maxSize ( float width , float height ) { } }
maxWidth = new FixedValue < C , T > ( layout . toolkit , width ) ; maxHeight = new FixedValue < C , T > ( layout . toolkit , height ) ; return this ;
public class IndentPrinter { /** * Prints the current indent level . */ public void printIndent ( ) { } }
for ( int i = 0 ; i < indentLevel ; i ++ ) { try { out . write ( indent ) ; } catch ( IOException ioe ) { throw new GroovyRuntimeException ( ioe ) ; } }
public class BaseFolderGridScreen {
    /**
     * Process the command.
     * <br/>Step 1 - Process the command if possible and return true if processed.
     * <br/>Step 2 - If I can't process, pass to all children (with me as the source).
     * <br/>Step 3 - If children didn't process, pass to parent (with me as the source).
     * <br/>Note: Never pass to a parent or child that matches the source (to avoid an endless loop).
     *
     * @param strCommand The command to process.
     * @param sourceSField The source screen field (to avoid echos).
     * @param iCommandOptions If this command creates a new screen, create in a new window?
     * @return true if success.
     */
    public boolean doCommand(String strCommand, ScreenField sourceSField, int iCommandOptions) {
        if (strCommand.equalsIgnoreCase(MenuConstants.FORMDETAIL)) {
            // Make sure if the use wants to select a record to re-connect the selection
            Record targetRecord = null;
            int iMode = ScreenConstants.DETAIL_MODE;
            OnSelectHandler listener = (OnSelectHandler) this.getMainRecord().getListener(OnSelectHandler.class);
            if (listener != null) {
                // A selection is pending: remember the record to sync, detach the handler
                // from this record, and open the detail form in select mode.
                targetRecord = listener.getRecordToSync();
                this.getMainRecord().removeListener(listener, false);
                iMode = iMode | ScreenConstants.SELECT_MODE;
            }
            BasePanel screen = this.onForm(null, iMode, true, iCommandOptions, null);
            if (targetRecord != null)
                // Re-connect the selection on the newly opened screen.
                screen.setSelectQuery(targetRecord, false);
            return true;
        } else
            // Not handled here: let the superclass route it to children/parent.
            return super.doCommand(strCommand, sourceSField, iCommandOptions);
    }
}
public class Normalize { /** * Normalize the values of each row in { @ code Matrix } to be normalized by * the length of each row . * @ param m The matrix to normalize . */ public static void byRow ( Matrix m ) { } }
for ( int i = 0 ; i < m . rows ( ) ; ++ i ) { double rowSum = 0 ; for ( int j = 0 ; j < m . columns ( ) ; ++ j ) rowSum += m . get ( i , j ) ; // Skip row whose sume is zero . if ( rowSum == 0 ) continue ; for ( int j = 0 ; j < m . columns ( ) ; ++ j ) m . set ( i , j , m . get ( i , j ) / rowSum ) ; }
public class ResourceReaderImpl { private String getPropertyValue ( final String key ) { } }
String value = null ; for ( Properties source : _sources ) { value = source . getProperty ( key ) ; if ( value != null ) { break ; } } return value ;
public class BDMImpl {
    /**
     * <!-- begin-user-doc -->
     * Reflectively sets the structural feature identified by {@code featureID}:
     * DM name, data format, or the triplets list (which is cleared and refilled).
     * Unknown feature IDs are delegated to the superclass.
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case AfplibPackage.BDM__DM_NAME:
                setDMName((String) newValue);
                return;
            case AfplibPackage.BDM__DAT_FMT:
                setDatFmt((Integer) newValue);
                return;
            case AfplibPackage.BDM__TRIPLETS:
                // Replace the whole collection: clear then add all new triplets.
                getTriplets().clear();
                getTriplets().addAll((Collection<? extends Triplet>) newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }
}
public class AbstractConversionTable {
    /**
     * Create a cell editor that enables to select a class.
     *
     * <p>The editor opens the JDT "Open Type" selection dialog and, if the user picks
     * exactly one type, returns its fully qualified name as the cell value.
     *
     * @return the cell editor.
     */
    protected CellEditor createClassCellEditor() {
        return new DialogCellEditor(getControl()) {
            @Override
            protected Object openDialogBox(Control cellEditorWindow) {
                // JDT type-selection dialog, restricted to types (not packages/members).
                final OpenTypeSelectionDialog dialog = new OpenTypeSelectionDialog(getControl().getShell(), false, PlatformUI.getWorkbench().getProgressService(), null, IJavaSearchConstants.TYPE);
                dialog.setTitle(JavaUIMessages.OpenTypeAction_dialogTitle);
                dialog.setMessage(JavaUIMessages.OpenTypeAction_dialogMessage);
                final int result = dialog.open();
                // Returning null leaves the cell value unchanged (cancelled dialog).
                if (result != IDialogConstants.OK_ID) {
                    return null;
                }
                final Object[] types = dialog.getResult();
                // Only accept a single IType selection.
                if (types == null || types.length != 1 || !(types[0] instanceof IType)) {
                    return null;
                }
                final IType type = (IType) types[0];
                final String name = type.getFullyQualifiedName();
                return Strings.emptyIfNull(name);
            }
        };
    }
}
public class ScanSpec { /** * Convenient method to specify expressions ( and the associated name map and * value map ) via { @ link ScanExpressionSpec } . */ @ Beta public ScanSpec withExpressionSpec ( ScanExpressionSpec xspec ) { } }
return withFilterExpression ( xspec . getFilterExpression ( ) ) . withProjectionExpression ( xspec . getProjectionExpression ( ) ) . withNameMap ( xspec . getNameMap ( ) ) . withValueMap ( xspec . getValueMap ( ) ) ;
public class LUDecomposition { /** * Return pivot permutation vector * @ return piv */ @ Nonnull public int [ ] getPivot ( ) { } }
final int [ ] p = new int [ m_nRows ] ; for ( int i = 0 ; i < m_nRows ; i ++ ) p [ i ] = m_aPivot [ i ] ; return p ;
public class MathUtils { /** * Returns a long average number by round up with specify dividend and * divisor . < br > * Returns 0 if dividend = = null or divisor = = null . < br > * Returns 1 if dividend = = divisor or divisor = = 0 . < br > * e . g : average is 3.5 , then return 4. * @ param dividend * number to be handled . * @ param divisor * number to be handled . * @ return a long average number by given dividend and divisor . */ public static < T extends Number > long divideRoundUp ( final T dividend , final T divisor ) { } }
double average = divide ( dividend , divisor ) ; return RoundUp ( average ) ;
public class EvaluateXPathMatcher { /** * Creates a matcher that matches when the examined XML input has a value at the * specified < code > xPath < / code > that satisfies the specified < code > valueMatcher < / code > . * < p > For example : < / p > * < pre > assertThat ( xml , hasXPath ( & quot ; / / fruits / fruit / @ name & quot ; , equalTo ( & quot ; apple & quot ; ) ) < / pre > * @ param xPath the target xpath * @ param valueMatcher matcher for the value at the specified xpath * @ return the xpath matcher */ @ Factory public static EvaluateXPathMatcher hasXPath ( String xPath , Matcher < String > valueMatcher ) { } }
return new EvaluateXPathMatcher ( xPath , valueMatcher ) ;
public class PropertyUtility { /** * Gets the ter . * @ param beanClass * the bean class * @ param property * the property * @ return the ter */ public static String getter ( ModelProperty property ) { } }
if ( property . isPublicField ( ) ) return property . getName ( ) ; if ( property . isFieldWithGetter ( ) ) { return "get" + converterField2Method . convert ( property . getName ( ) ) + "()" ; } else if ( property . isFieldWithIs ( ) ) { return "is" + converterField2Method . convert ( property . getName ( ) ) + "()" ; } else { throw new PropertyVisibilityException ( String . format ( "In class '%s' property '%s' can not be read" , property . getParent ( ) . getElement ( ) . asType ( ) , property . getName ( ) ) ) ; }
public class PathsIteratorImpl {
    /**
     * Advances to the next bundle and returns its path.
     *
     * <p>If the bundle declares an IE conditional expression, the comment callback
     * handler is asked to open a conditional comment first. The returned path is
     * either the variant-resolved bundle URL or, when configured, the bundle's
     * alternate production URL.
     *
     * @see net.jawr.web.resource.bundle.iterator.ResourceBundlePathsIterator#nextPath()
     */
    @Override
    public BundlePath nextPath() {
        // Side effect: advances the shared bundle iterator (current bundle state).
        currentBundle = bundlesIterator.next();
        if (null != currentBundle.getExplorerConditionalExpression())
            commentCallbackHandler.openConditionalComment(currentBundle.getExplorerConditionalExpression());
        String name = currentBundle.getId();
        BundlePath bundlePath = null;
        String productionURL = currentBundle.getAlternateProductionURL();
        if (StringUtils.isEmpty(productionURL)) {
            // Normal case: join the variant-aware URL prefix with the bundle id.
            bundlePath = new BundlePath(currentBundle.getBundlePrefix(), PathNormalizer.joinPaths(currentBundle.getURLPrefix(variants), name), false);
        } else {
            // A production URL overrides the computed path (flag marks it external).
            bundlePath = new BundlePath(currentBundle.getBundlePrefix(), productionURL, true);
        }
        return bundlePath;
    }
}
public class OpentracingRestClientFilter { /** * { @ inheritDoc } */ @ Override public void filter ( ClientRequestContext clientRequestContext , ClientResponseContext clientResponseContext ) throws IOException { } }
if ( clientFilter != null ) { clientFilter . filter ( clientRequestContext , clientResponseContext ) ; } else { Tr . debug ( tc , "clientFilter is null" ) ; }
public class AsciiDocExporter {
    /**
     * Method that is called to write test suite time to AsciiDoc document.
     *
     * <p>Emits a titled sidebar block containing bolded start time, stop time and
     * duration (converted from milliseconds to seconds).
     *
     * @param start time.
     * @param stop end time.
     * @param duration in millis.
     * @throws IOException if writing to the underlying writer fails
     */
    protected void writeTime(Date start, Date stop, long duration) throws IOException {
        // Block title, then "****" opens an AsciiDoc sidebar block.
        writer.append(".").append(this.resourceBundle.getString("asciidoc.reporter.time")).append(NEW_LINE);
        writer.append("****").append(NEW_LINE);
        // NOTE(review): SIMPLE_DATE_FORMAT appears to be a shared static formatter;
        // SimpleDateFormat is not thread-safe — confirm this exporter is single-threaded.
        writer.append("*").append(this.resourceBundle.getString("asciidoc.reporter.start")).append("*").append(" ").append(SIMPLE_DATE_FORMAT.format(start)).append(NEW_LINE).append(NEW_LINE);
        writer.append("*").append(this.resourceBundle.getString("asciidoc.reporter.stop")).append("*").append(" ").append(SIMPLE_DATE_FORMAT.format(stop)).append(NEW_LINE).append(NEW_LINE);
        // Duration is rendered in seconds with an "s" suffix.
        writer.append("*").append(this.resourceBundle.getString("asciidoc.reporter.duration")).append("*").append(" ").append(convertDuration(duration, TimeUnit.MILLISECONDS, TimeUnit.SECONDS)).append("s").append(NEW_LINE);
        // Close the sidebar block.
        writer.append("****").append(NEW_LINE).append(NEW_LINE);
    }
}
public class LObjIntByteFunctionBuilder { /** * One of ways of creating builder . This might be the only way ( considering all _ functional _ builders ) that might be utilize to specify generic params only once . */ @ Nonnull public static < T , R > LObjIntByteFunctionBuilder < T , R > objIntByteFunction ( Consumer < LObjIntByteFunction < T , R > > consumer ) { } }
return new LObjIntByteFunctionBuilder ( consumer ) ;
public class ClassDocImpl { /** * Return true if this class implements * < code > java . io . Externalizable < / code > . */ public boolean isExternalizable ( ) { } }
try { return env . types . isSubtype ( type , env . externalizableSym . type ) ; } catch ( CompletionFailure ex ) { // quietly ignore completion failures return false ; }
public class ProxyGeneratorAdapter {
    /**
     * Generate a call to the delegate object.
     *
     * <p>Emits bytecode that loads the delegate field, boxes all arguments into an
     * {@code Object[]}, and dispatches through
     * {@code InvokerHelper.invokeMethod(Object, String, Object)} so intercepted calls
     * still go through Groovy's runtime. The result is unwrapped back to the declared
     * return type. Statement order is significant: it mirrors JVM operand-stack order.
     */
    protected MethodVisitor makeDelegateCall(final String name, final String desc, final String signature, final String[] exceptions, final int accessFlags) {
        MethodVisitor mv = super.visitMethod(accessFlags, name, desc, signature, exceptions);
        mv.visitVarInsn(ALOAD, 0); // load this
        mv.visitFieldInsn(GETFIELD, proxyName, DELEGATE_OBJECT_FIELD, BytecodeHelper.getTypeDescription(delegateClass)); // load delegate
        // using InvokerHelper to allow potential intercepted calls
        int size;
        mv.visitLdcInsn(name); // method name
        Type[] args = Type.getArgumentTypes(desc);
        BytecodeHelper.pushConstant(mv, args.length);
        // Allocate Object[args.length] to carry the (boxed) arguments.
        mv.visitTypeInsn(ANEWARRAY, "java/lang/Object");
        size = 6;
        // idx tracks the local-variable slot; longs/doubles occupy two slots.
        int idx = 1;
        for (int i = 0; i < args.length; i++) {
            Type arg = args[i];
            mv.visitInsn(DUP);
            BytecodeHelper.pushConstant(mv, i);
            // primitive types must be boxed
            if (isPrimitive(arg)) {
                mv.visitIntInsn(getLoadInsn(arg), idx);
                String wrappedType = getWrappedClassDescriptor(arg);
                mv.visitMethodInsn(INVOKESTATIC, wrappedType, "valueOf", "(" + arg.getDescriptor() + ")L" + wrappedType + ";", false);
            } else {
                mv.visitVarInsn(ALOAD, idx); // load argument i
            }
            // Track the maximum operand-stack depth needed so far.
            size = Math.max(size, 5 + registerLen(arg));
            idx += registerLen(arg);
            mv.visitInsn(AASTORE); // store value into array
        }
        mv.visitMethodInsn(INVOKESTATIC, "org/codehaus/groovy/runtime/InvokerHelper", "invokeMethod", "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/Object;", false);
        // Convert the Object result back to the method's declared return type.
        unwrapResult(mv, desc);
        mv.visitMaxs(size, registerLen(args) + 1);
        return mv;
    }
}
public class WaitFor {
    /**
     * Waits up to the default wait time (5 seconds unless changed) for a cookies with
     * the provided name has a value matching the expected pattern. This information
     * will be logged and recorded, with a screenshot for traceability and added
     * debugging support.
     *
     * @param cookieName the name of the cookie
     * @param expectedCookiePattern the expected value of the cookie
     */
    @Override
    public void cookieMatches(String cookieName, String expectedCookiePattern) {
        // Delegate to the timeout-aware overload using the configured default wait.
        cookieMatches(defaultWait, cookieName, expectedCookiePattern);
    }
}
public class MathBindings { /** * Binding for { @ link java . lang . Math # cos ( double ) } * @ param a an angle , in radians . * @ return the cosine of the argument . */ public static DoubleBinding cos ( final ObservableDoubleValue a ) { } }
return createDoubleBinding ( ( ) -> Math . cos ( a . get ( ) ) , a ) ;
public class SchedulerMain { /** * Construct all required command line options */ private static Options constructOptions ( ) { } }
Options options = new Options ( ) ; Option cluster = Option . builder ( "c" ) . desc ( "Cluster name in which the topology needs to run on" ) . longOpt ( "cluster" ) . hasArgs ( ) . argName ( "cluster" ) . required ( ) . build ( ) ; Option role = Option . builder ( "r" ) . desc ( "Role under which the topology needs to run" ) . longOpt ( "role" ) . hasArgs ( ) . argName ( "role" ) . required ( ) . build ( ) ; Option environment = Option . builder ( "e" ) . desc ( "Environment under which the topology needs to run" ) . longOpt ( "environment" ) . hasArgs ( ) . argName ( "environment" ) . required ( ) . build ( ) ; Option topologyName = Option . builder ( "n" ) . desc ( "Name of the topology" ) . longOpt ( "topology_name" ) . hasArgs ( ) . argName ( "topology name" ) . required ( ) . build ( ) ; Option topologyJar = Option . builder ( "f" ) . desc ( "Topology jar/pex file path" ) . longOpt ( "topology_bin" ) . hasArgs ( ) . argName ( "topology binary file" ) . required ( ) . build ( ) ; Option schedulerHTTPPort = Option . builder ( "p" ) . desc ( "Http Port number on which the scheduler listens for requests" ) . longOpt ( "http_port" ) . hasArgs ( ) . argName ( "http port" ) . required ( ) . build ( ) ; Option property = Option . builder ( SchedulerUtils . SCHEDULER_COMMAND_LINE_PROPERTIES_OVERRIDE_OPTION ) . desc ( "use value for given property" ) . longOpt ( "property_override" ) . hasArgs ( ) . valueSeparator ( ) . argName ( "property=value" ) . build ( ) ; Option verbose = Option . builder ( "v" ) . desc ( "Enable debug logs" ) . longOpt ( "verbose" ) . build ( ) ; options . addOption ( cluster ) ; options . addOption ( role ) ; options . addOption ( environment ) ; options . addOption ( topologyName ) ; options . addOption ( topologyJar ) ; options . addOption ( schedulerHTTPPort ) ; options . addOption ( property ) ; options . addOption ( verbose ) ; return options ;
public class RebalanceController { /** * Pause between cluster change in metadata and starting server rebalancing * work . */ private void proxyPause ( ) { } }
logger . info ( "Pausing after cluster state has changed to allow proxy bridges to be established. " + "Will start rebalancing work on servers in " + proxyPauseSec + " seconds." ) ; try { Thread . sleep ( TimeUnit . SECONDS . toMillis ( proxyPauseSec ) ) ; } catch ( InterruptedException e ) { logger . warn ( "Sleep interrupted in proxy pause." ) ; }
public class ServiceBuilder {
    /**
     * Creates a new instance of the ServiceBuilder class which is contained in memory.
     * Any data added to this service will be lost when the object is garbage collected
     * or the process terminates.
     *
     * @param builderConfig The ServiceBuilderConfig to use.
     * @param executorBuilder A Function that, given a thread count and a pool name,
     *        creates a ScheduledExecutorService with the given number of threads that
     *        have the given name as prefix.
     * @return The new instance of the ServiceBuilder.
     */
    @VisibleForTesting
    public static ServiceBuilder newInMemoryBuilder(ServiceBuilderConfig builderConfig, ExecutorBuilder executorBuilder) {
        ServiceConfig serviceConfig = builderConfig.getConfig(ServiceConfig::builder);
        ServiceBuilder builder;
        if (serviceConfig.isReadOnlySegmentStore()) {
            // Only components required for ReadOnly SegmentStore.
            builder = new ReadOnlyServiceBuilder(builderConfig, serviceConfig, executorBuilder);
        } else {
            // Components that are required for general SegmentStore.
            builder = new ServiceBuilder(builderConfig, serviceConfig, executorBuilder).withCacheFactory(setup -> new InMemoryCacheFactory());
        }
        // Components that are required for all types of SegmentStore: in-memory
        // durable log, local container management, in-memory storage, and the
        // segment store service itself.
        return builder.withDataLogFactory(setup -> new InMemoryDurableDataLogFactory(setup.getCoreExecutor())).withContainerManager(setup -> new LocalSegmentContainerManager(setup.getContainerRegistry(), setup.getSegmentToContainerMapper())).withStorageFactory(setup -> new InMemoryStorageFactory(setup.getStorageExecutor())).withStreamSegmentStore(setup -> new StreamSegmentService(setup.getContainerRegistry(), setup.getSegmentToContainerMapper()));
    }
}
public class HeartbeatScheduler { /** * Removes a timer from the scheduler . * This method will fail if the timer is not in the scheduler . * @ param timer the timer to remove */ public static void removeTimer ( ScheduledTimer timer ) { } }
Preconditions . checkNotNull ( timer , "timer" ) ; try ( LockResource r = new LockResource ( sLock ) ) { ScheduledTimer removedTimer = sTimers . remove ( timer . getThreadName ( ) ) ; Preconditions . checkNotNull ( removedTimer , "sTimers should contain %s" , timer . getThreadName ( ) ) ; Preconditions . checkState ( removedTimer == timer , "sTimers should contain the timer being removed" ) ; }
public class UserManagedCacheBuilder { /** * Adds { @ link org . ehcache . expiry . Expiry } configuration to the returned builder . * @ param expiry the expiry to use * @ return a new builer with the added expiry * @ deprecated Use { @ link # withExpiry ( ExpiryPolicy ) } instead */ @ Deprecated public final UserManagedCacheBuilder < K , V , T > withExpiry ( org . ehcache . expiry . Expiry < ? super K , ? super V > expiry ) { } }
if ( expiry == null ) { throw new NullPointerException ( "Null expiry" ) ; } UserManagedCacheBuilder < K , V , T > otherBuilder = new UserManagedCacheBuilder < > ( this ) ; otherBuilder . expiry = ExpiryUtils . convertToExpiryPolicy ( expiry ) ; return otherBuilder ;
public class Normalizer2Impl { /** * Returns the FCD data for code point c . * @ param c A Unicode code point . * @ return The lccc ( c ) in bits 15 . . 8 and tccc ( c ) in bits 7 . . 0. */ public int getFCD16 ( int c ) { } }
if ( c < 0 ) { return 0 ; } else if ( c < 0x180 ) { return tccc180 [ c ] ; } else if ( c <= 0xffff ) { if ( ! singleLeadMightHaveNonZeroFCD16 ( c ) ) { return 0 ; } } return getFCD16FromNormData ( c ) ;
public class A_CmsXmlDocument {
    /**
     * Creates a partial deep element copy according to the set of element paths.<p>
     * Only elements contained in that set will be copied.
     *
     * @param element the element to copy
     * @param copyElements the set of paths for elements to copy
     * @return a partial deep copy of <code>element</code>
     */
    protected Element createDeepElementCopy(Element element, Set<String> copyElements) {
        // Delegate to the recursive worker with no parent/base path (top-level call).
        return createDeepElementCopyInternal(null, null, element, copyElements);
    }
}
public class LocalNetworkGatewaysInner {
    /**
     * Creates or updates a local network gateway in the specified resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param localNetworkGatewayName The name of the local network gateway.
     * @param parameters Parameters supplied to the create or update local network gateway operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the LocalNetworkGatewayInner object if successful.
     */
    public LocalNetworkGatewayInner createOrUpdate(String resourceGroupName, String localNetworkGatewayName, LocalNetworkGatewayInner parameters) {
        // Blocks on the async long-running operation; last() waits for the final
        // (terminal) emission of the LRO polling sequence before unwrapping the body.
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, localNetworkGatewayName, parameters).toBlocking().last().body();
    }
}
public class CreatorUtils { /** * @ param mapperTypeList the types to search inside * @ return the first bean type encountered */ private static JClassType findFirstTypeToApplyPropertyAnnotation ( List < JMapperType > mapperTypeList ) { } }
if ( mapperTypeList . isEmpty ( ) ) { return null ; } List < JMapperType > subLevel = new ArrayList < JMapperType > ( ) ; for ( JMapperType mapperType : mapperTypeList ) { if ( mapperType . isBeanMapper ( ) ) { return mapperType . getType ( ) . isClass ( ) ; } else if ( mapperType . getParameters ( ) . size ( ) > 0 ) { subLevel . addAll ( mapperType . getParameters ( ) ) ; } } return findFirstTypeToApplyPropertyAnnotation ( subLevel ) ;
public class Version {
    /**
     * <pre>
     * Custom static error pages. Limited to 10KB per page.
     * Only returned in `GET` requests if `view=FULL` is set.
     * </pre>
     *
     * <code>repeated .google.appengine.v1.ErrorHandler error_handlers = 101;</code>
     */
    public java.util.List<? extends com.google.appengine.v1.ErrorHandlerOrBuilder> getErrorHandlersOrBuilderList() {
        // Generated protobuf accessor: the internal repeated-field list is returned
        // directly (read-only by protobuf convention).
        return errorHandlers_;
    }
}
public class TimestampUtils {
    /**
     * Extracts the date part from a timestamp.
     *
     * @param millis The timestamp from which to extract the date.
     * @param tz The time zone of the date; {@code null} means the default time zone.
     * @return The extracted date (midnight of the day containing {@code millis} in {@code tz}).
     */
    public Date convertToDate(long millis, TimeZone tz) {
        // No adjustments for the infinity hack values.
        if (millis <= PGStatement.DATE_NEGATIVE_INFINITY || millis >= PGStatement.DATE_POSITIVE_INFINITY) {
            return new Date(millis);
        }
        if (tz == null) {
            tz = getDefaultTz();
        }
        if (isSimpleTimeZone(tz.getID())) {
            // Fast path for zones without DST: truncate to 00:00 of the day arithmetically.
            // Suppose the input date is 7 Jan 15:40 GMT+02:00 (that is 13:40 UTC).
            // We want it to become 7 Jan 00:00 GMT+02:00.
            // 1) Make sure millis becomes 15:40 in UTC, so add the zone offset.
            int offset = tz.getRawOffset();
            millis += offset;
            // 2) Truncate hours, minutes, etc. A day is always 86400 seconds here, no
            //    matter what leap seconds are.
            millis = millis / ONEDAY * ONEDAY;
            // 3) Now millis is 7 Jan 00:00 UTC; we need that in GMT+02:00, so subtract
            //    the offset again.
            millis -= offset;
            // Now we have brand-new 7 Jan 00:00 GMT+02:00.
            return new Date(millis);
        }
        // Slow path: use a Calendar so DST transitions are handled correctly.
        // NOTE(review): calendarWithUserTz looks like a shared mutable field — confirm
        // this method is only called single-threaded.
        Calendar cal = calendarWithUserTz;
        cal.setTimeZone(tz);
        cal.setTimeInMillis(millis);
        cal.set(Calendar.HOUR_OF_DAY, 0);
        cal.set(Calendar.MINUTE, 0);
        cal.set(Calendar.SECOND, 0);
        cal.set(Calendar.MILLISECOND, 0);
        return new Date(cal.getTimeInMillis());
    }
}
public class Word { /** * Creates a word from an array of symbols . * @ param symbols * the symbol array * @ return a word containing the symbols in the specified array */ @ SafeVarargs @ Nonnull public static < I > Word < I > fromSymbols ( I ... symbols ) { } }
if ( symbols . length == 0 ) { return epsilon ( ) ; } if ( symbols . length == 1 ) { return fromLetter ( symbols [ 0 ] ) ; } return new SharedWord < > ( symbols . clone ( ) ) ;
public class AccumuloSplitManager { /** * Gets a list of { @ link AccumuloColumnConstraint } based on the given constraint ID , excluding the row ID column * @ param rowIdName Presto column name mapping to the Accumulo row ID * @ param constraint Set of query constraints * @ return List of all column constraints */ private static List < AccumuloColumnConstraint > getColumnConstraints ( String rowIdName , TupleDomain < ColumnHandle > constraint ) { } }
// NOTE(review): getColumnDomains() returns an Optional which is unwrapped
// with get() unguarded — presumably callers never pass an "all"/"none"
// TupleDomain here; confirm, otherwise this can throw NoSuchElementException.
// Likewise getFamily()/getQualifier() are assumed present for every
// non-row-ID column.
ImmutableList . Builder < AccumuloColumnConstraint > constraintBuilder = ImmutableList . builder ( ) ; for ( ColumnDomain < ColumnHandle > columnDomain : constraint . getColumnDomains ( ) . get ( ) ) { AccumuloColumnHandle columnHandle = ( AccumuloColumnHandle ) columnDomain . getColumn ( ) ; if ( ! columnHandle . getName ( ) . equals ( rowIdName ) ) { // Family and qualifier will exist for non - row ID columns constraintBuilder . add ( new AccumuloColumnConstraint ( columnHandle . getName ( ) , columnHandle . getFamily ( ) . get ( ) , columnHandle . getQualifier ( ) . get ( ) , Optional . of ( columnDomain . getDomain ( ) ) , columnHandle . isIndexed ( ) ) ) ; } } return constraintBuilder . build ( ) ;
public class Waiter { /** * Waits for a text to be shown . * @ param classToFilterBy the class to filter by * @ param text the text that needs to be shown , specified as a regular expression * @ param expectedMinimumNumberOfMatches the minimum number of matches of text that must be shown . { @ code 0 } means any number of matches * @ param timeout the amount of time in milliseconds to wait * @ param scroll { @ code true } if scrolling should be performed * @ return { @ code true } if text is found and { @ code false } if it is not found before the timeout */ public < T extends TextView > T waitForText ( Class < T > classToFilterBy , String text , int expectedMinimumNumberOfMatches , long timeout , boolean scroll ) { } }
// Delegates to the full overload with two extra flags hard-coded
// (false, true) — their meaning is not visible here; presumably
// "onlyVisible"-style options. TODO confirm against the overload's signature.
return waitForText ( classToFilterBy , text , expectedMinimumNumberOfMatches , timeout , scroll , false , true ) ;
public class JsBusImpl { /** * Set an individual custom property for the bus */ public void setCustomProperty ( String name , String value ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "setCustomProperty" , name + " " + value ) ; // Set the properties that belong to the bus
// NOTE(review): the actual store into the properties map is commented out
// below, so this method currently only traces and is otherwise a no-op.
// Confirm whether this is intentional (disabled feature) or a regression.
// customProperties . put ( name , value ) ;
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "setCustomProperty" ) ;
public class Either { /** * { @ inheritDoc } */ @ Override @ SuppressWarnings ( "unchecked" ) public final < R2 > Either < L , R2 > biMapR ( Function < ? super R , ? extends R2 > fn ) { } }
// Delegates to the inherited default implementation and narrows its result
// back to Either. The cast is unchecked but safe as long as the Bifunctor
// default returns an instance of this type — hence the @SuppressWarnings.
return ( Either < L , R2 > ) Bifunctor . super . biMapR ( fn ) ;
public class ChannelFrameworkImpl { /** * This method starts both inbound and outbound chains . * @ param targetChainData * @ param startMode * - indicate how to handle failure conditions * @ throws ChannelException * @ throws ChainException */ public synchronized void startChainInternal ( ChainData targetChainData , ChainStartMode startMode ) throws ChannelException , ChainException { } }
String chainName = targetChainData . getName ( ) ;
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) {
Tr . entry ( tc , "startChainInternal: " + chainName ) ;
}
// Disabled chains are silently skipped (trace-only), not an error.
if ( ! targetChainData . isEnabled ( ) ) {
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) {
Tr . event ( tc , "Chain " + chainName + " is disabled" ) ;
}
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) {
Tr . exit ( tc , "startChainInternal" ) ;
}
return ;
}
ChainData chainData ;
// Find the chain in the framework configuration .
Chain chain = getRunningChain ( chainName ) ;
if ( null == chain ) {
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) {
Tr . debug ( tc , "Chain not found running. Double check it is configured." ) ;
}
// Get the chain configuration
chainData = chainDataMap . get ( chainName ) ;
if ( null == chainData ) {
// Did not find the chain in the config . This method must have been
// called from something other than startChain ( which does this check ) .
// As of 7/20/04 the only other location is the ChainStartAlarmListener .
// This is now a case where the framework must have been shut
// down while an alarm was set to start the chain later . The alarm will
// handle this exception .
throw new InvalidChainNameException ( "Unable to start unknown chain, " + chainName ) ;
}
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) {
Tr . debug ( tc , "Chain exists, but not in runtime yet. Initialize it." ) ;
}
initChainInternal ( chainData ) ;
chain = getRunningChain ( chainName ) ;
if ( null == chain ) {
// This shouldn ' t happen .
InvalidChainNameException e = new InvalidChainNameException ( "Unable to start unknown chain, " + chainName ) ;
throw e ;
}
}
// Update the chainData to include what ' s in the runtime ( children channel data , not parent )
// Note that the input came from a user who only knows about parent channel data .
chainData = chain . getChainData ( ) ;
// Channels successfully started so far; unwound on failure below.
List < Channel > chainsDone = new ArrayList < Channel > ( ) ;
try {
RuntimeState chainState = chain . getState ( ) ;
if ( RuntimeState . INITIALIZED . equals ( chainState ) ) {
// Check for inbound vs outbound . Channels are started in different
// orders based on this .
if ( chainData . getType ( ) . equals ( FlowType . INBOUND ) ) {
// Inbound chain . Ensure the disc process of each channel is
// initialized .
( ( InboundChain ) chain ) . setupDiscProcess ( ) ;
Channel [ ] chainChannels = chain . getChannels ( ) ;
ChannelData [ ] channelData = chain . getChannelsData ( ) ;
// Loop through the channels starting from the app channel and down to
// the dev channel .
for ( int i = chainChannels . length - 1 ; i >= 0 ; -- i ) {
if ( startChannelInChain ( chainChannels [ i ] , chain ) ) {
chainsDone . add ( chainChannels [ i ] ) ;
// Only take the next step if the current channel is not the
// device side channel .
if ( i != 0 ) {
// Start the disc process between adjacent channels.
( ( InboundChain ) chain ) . startDiscProcessBetweenChannels ( ( InboundChannel ) chainChannels [ i ] , ( InboundChannel ) chainChannels [ i - 1 ] , channelData [ i ] . getDiscriminatorWeight ( ) ) ;
}
} else {
// The channel was not started . It must already be running . No more
// work to do .
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) {
Tr . debug ( tc , "Channel was already started; " + chainChannels [ i ] . getName ( ) ) ;
}
break ;
}
}
} else {
// Outbound chain .
Channel [ ] chainChannels = chain . getChannels ( ) ;
// Loop through the channel starts from the app side to the dev side .
for ( int i = 0 ; i < chainChannels . length ; ++ i ) {
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) {
Tr . debug ( tc , "Start channel in chain: " + chainChannels [ i ] . getName ( ) ) ;
}
// Start the channels .
if ( startChannelInChain ( chainChannels [ i ] , chain ) ) {
chainsDone . add ( chainChannels [ i ] ) ;
}
}
}
} else if ( ! RuntimeState . STARTED . equals ( chainState ) ) {
// Neither INITIALIZED nor STARTED: cannot start from this state.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) {
Tr . debug ( tc , "Cannot start chain " + chainData . getName ( ) + ", state: " + chainState . ordinal ) ;
}
InvalidRuntimeStateException e = new InvalidRuntimeStateException ( "Cannot start chain " + chainData . getName ( ) ) ;
throw e ;
}
} catch ( ChannelException e ) {
// Don ' t ffdc or log error messages in cases where a retry will take place
if ( ! ( e instanceof RetryableChannelException ) || ( startMode != ChainStartMode . RETRY_EACH_ON_FAIL ) ) {
FFDCFilter . processException ( e , getClass ( ) . getName ( ) + ".startChainInternal" , "2577" , this , new Object [ ] { chainData } ) ;
( ( ChainDataImpl ) chainData ) . chainStartFailed ( 1 , 0 ) ;
Tr . error ( tc , "chain.start.error" , new Object [ ] { chain . getName ( ) , e . toString ( ) } ) ;
} else if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) {
Tr . debug ( tc , "Caught RetryableException" ) ;
}
// Handle partially created chain . Undo any starts that have been done
// thus far .
for ( int i = 0 ; i < chainsDone . size ( ) ; i ++ ) {
try {
// Note , the former channel state may have been quiesced . Stopping anyhow .
stopChannel ( chainsDone . get ( i ) ) ;
} catch ( Exception e1 ) {
// NOTE(review): the outer 'e' (not 'e1') is reported here — presumably
// deliberate so FFDC records the root failure; confirm.
FFDCFilter . processException ( e , getClass ( ) . getName ( ) + ".startChainInternal" , "2589" , this , new Object [ ] { chainsDone . get ( i ) } ) ;
}
}
// Throw the exception up to the caller .
throw e ;
} catch ( ChainException e ) {
FFDCFilter . processException ( e , getClass ( ) . getName ( ) + ".startChainInternal" , "2595" , this , new Object [ ] { chainData } ) ;
Tr . error ( tc , "chain.start.error" , new Object [ ] { chain . getName ( ) , e . toString ( ) } ) ;
// Handle partially created chain . Undo any starts that have been done
// thus far .
for ( int i = 0 ; i < chainsDone . size ( ) ; i ++ ) {
try {
stopChannel ( chainsDone . get ( i ) ) ;
} catch ( Exception e1 ) {
// NOTE(review): outer 'e' reported, as above — confirm intent.
FFDCFilter . processException ( e , getClass ( ) . getName ( ) + ".startChainInternal" , "2602" , this , new Object [ ] { chainsDone . get ( i ) } ) ;
}
}
// Throw the exception up to the caller .
throw e ;
}
// All channels are up; transition the chain itself to STARTED.
chain . start ( ) ;
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) {
Tr . exit ( tc , "startChainInternal" ) ;
}
public class CndImporter { /** * Parse the child node definition ' s list of required primary types , if they appear next on the token stream . * @ param tokens the tokens containing the definition ; never null * @ param childDefn the child node definition ; never null * @ throws ParsingException if there is a problem parsing the content * @ throws ConstraintViolationException not expected */ protected void parseRequiredPrimaryTypes ( TokenStream tokens , JcrNodeDefinitionTemplate childDefn ) throws ConstraintViolationException { } }
if ( tokens . canConsume ( '(' ) ) { List < Name > requiredTypes = parseNameList ( tokens ) ; if ( requiredTypes . isEmpty ( ) ) { requiredTypes . add ( JcrNtLexicon . BASE ) ; } childDefn . setRequiredPrimaryTypeNames ( names ( requiredTypes ) ) ; tokens . consume ( ')' ) ; }
public class DefaultVOMSACService { /** * Handles warnings included in the VOMS response * @ param request * the request * @ param si * the VOMS server endpoint information * @ param response * the received { @ link VOMSResponse } */ protected void handleWarningsInResponse ( VOMSACRequest request , VOMSServerInfo si , VOMSResponse response ) { } }
if ( response . hasWarnings ( ) ) requestListener . notifyWarningsInVOMSResponse ( request , si , response . warningMessages ( ) ) ;
public class CreatePlayerSessionsRequest { /** * List of unique identifiers for the players to be added . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setPlayerIds ( java . util . Collection ) } or { @ link # withPlayerIds ( java . util . Collection ) } if you want to * override the existing values . * @ param playerIds * List of unique identifiers for the players to be added . * @ return Returns a reference to this object so that method calls can be chained together . */ public CreatePlayerSessionsRequest withPlayerIds ( String ... playerIds ) { } }
if ( this . playerIds == null ) { setPlayerIds ( new java . util . ArrayList < String > ( playerIds . length ) ) ; } for ( String ele : playerIds ) { this . playerIds . add ( ele ) ; } return this ;
public class UpdateNFSFileShareRequest { /** * The list of clients that are allowed to access the file gateway . The list must contain either valid IP addresses * or valid CIDR blocks . * @ param clientList * The list of clients that are allowed to access the file gateway . The list must contain either valid IP * addresses or valid CIDR blocks . */ public void setClientList ( java . util . Collection < String > clientList ) { } }
if ( clientList == null ) { this . clientList = null ; return ; } this . clientList = new com . amazonaws . internal . SdkInternalList < String > ( clientList ) ;
public class DeviceImpl { /** * get State * @ return state */ @ Override public DevState state ( ) { } }
// Restore the logging context for this device before doing anything else.
MDC . setContextMap ( contextMap ) ; xlogger . entry ( ) ;
try { state = getState ( ) ; } catch ( final DevFailed e ) {
// getState() failed: force the device into UNKNOWN and record the failure
// text in the status. Errors during that fallback are only debug-logged —
// this accessor must never throw.
try { stateImpl . stateMachine ( DeviceState . UNKNOWN ) ; statusImpl . statusMachine ( DevFailedUtils . toString ( e ) , DeviceState . UNKNOWN ) ; state = DevState . UNKNOWN ; } catch ( final DevFailed e1 ) { logger . debug ( NOT_IMPORTANT_ERROR , e1 ) ; }
logger . debug ( NOT_IMPORTANT_ERROR , e ) ; }
return state ;
public class CmsLocationPopupContent { /** * On address suggest selection . < p > * @ param event the selection event */ @ UiHandler ( "m_addressField" ) void onAddressSelection ( SelectionEvent < SuggestOracle . Suggestion > event ) { } }
// Forward the chosen suggestion to the controller, which owns the
// address-change handling for this popup.
m_controller . onAddressChange ( event . getSelectedItem ( ) ) ;
public class SQLParserFactory { /** * Create SQL parser . * @ param dbType database type * @ param shardingRule databases and tables sharding rule * @ param lexerEngine lexical analysis engine * @ param shardingTableMetaData sharding metadata * @ param sql sql to parse * @ return SQL parser */ public static SQLParser newInstance ( final DatabaseType dbType , final ShardingRule shardingRule , final LexerEngine lexerEngine , final ShardingTableMetaData shardingTableMetaData , final String sql ) { } }
// Dispatch on the first token of the statement. MySQL and H2 are routed to
// the ANTLR engine for most statement classes; other dialects fall back to
// the hand-written parsers.
lexerEngine . nextToken ( ) ; TokenType tokenType = lexerEngine . getCurrentToken ( ) . getType ( ) ;
if ( DQLStatement . isDQL ( tokenType ) ) { if ( DatabaseType . MySQL == dbType || DatabaseType . H2 == dbType ) { return new AntlrParsingEngine ( dbType , sql , shardingRule , shardingTableMetaData ) ; } return getDQLParser ( dbType , shardingRule , lexerEngine , shardingTableMetaData ) ; }
if ( DMLStatement . isDML ( tokenType ) ) { if ( DatabaseType . MySQL == dbType || DatabaseType . H2 == dbType ) { return new AntlrParsingEngine ( dbType , sql , shardingRule , shardingTableMetaData ) ; } return getDMLParser ( dbType , tokenType , shardingRule , lexerEngine , shardingTableMetaData ) ; }
// NOTE(review): for REPLACE on a non-MySQL/H2 dialect this branch falls
// through without returning, continuing to the checks below — presumably
// intentional (REPLACE is MySQL-specific); confirm.
if ( MySQLKeyword . REPLACE == tokenType ) { if ( DatabaseType . MySQL == dbType || DatabaseType . H2 == dbType ) { return new AntlrParsingEngine ( dbType , sql , shardingRule , shardingTableMetaData ) ; } }
if ( TCLStatement . isTCL ( tokenType ) ) { return new AntlrParsingEngine ( dbType , sql , shardingRule , shardingTableMetaData ) ; }
if ( DALStatement . isDAL ( tokenType ) ) { if ( DatabaseType . PostgreSQL == dbType && PostgreSQLKeyword . SHOW == tokenType ) { return new AntlrParsingEngine ( dbType , sql , shardingRule , shardingTableMetaData ) ; } return getDALParser ( dbType , ( Keyword ) tokenType , shardingRule , lexerEngine ) ; }
// DCL/DDL need the second token to disambiguate (e.g. CREATE USER vs CREATE TABLE).
lexerEngine . nextToken ( ) ; TokenType secondaryTokenType = lexerEngine . getCurrentToken ( ) . getType ( ) ;
if ( DCLStatement . isDCL ( tokenType , secondaryTokenType ) ) { return new AntlrParsingEngine ( dbType , sql , shardingRule , shardingTableMetaData ) ; }
if ( DDLStatement . isDDL ( tokenType , secondaryTokenType ) ) { return new AntlrParsingEngine ( dbType , sql , shardingRule , shardingTableMetaData ) ; }
if ( TCLStatement . isTCLUnsafe ( dbType , tokenType , lexerEngine ) ) { return new AntlrParsingEngine ( dbType , sql , shardingRule , shardingTableMetaData ) ; }
if ( DefaultKeyword . SET . equals ( tokenType ) ) { return SetParserFactory . newInstance ( ) ; }
// Anything unrecognized is an unsupported statement type.
throw new SQLParsingUnsupportedException ( tokenType ) ;
public class CmsLocationPicker { /** * Sets the location info to the info panel . < p > * @ param infos the location info items */ protected void setLocationInfo ( Map < String , String > infos ) { } }
if ( infos . isEmpty ( ) ) { m_locationInfoPanel . getStyle ( ) . setDisplay ( Display . NONE ) ; } else { StringBuffer infoHtml = new StringBuffer ( ) ; for ( Entry < String , String > info : infos . entrySet ( ) ) { infoHtml . append ( "<p><span>" ) . append ( info . getKey ( ) ) . append ( ":</span>" ) . append ( info . getValue ( ) ) . append ( "</p>" ) ; } m_locationInfoPanel . setInnerHTML ( infoHtml . toString ( ) ) ; m_locationInfoPanel . getStyle ( ) . clearDisplay ( ) ; }
public class Address { /** * syntactic sugar */ public StringType addLineElement ( ) { } }
StringType t = new StringType ( ) ; if ( this . line == null ) this . line = new ArrayList < StringType > ( ) ; this . line . add ( t ) ; return t ;
public class SnapshotsInner { /** * Updates ( patches ) a snapshot . * @ param resourceGroupName The name of the resource group . * @ param snapshotName The name of the snapshot that is being created . The name can ' t be changed after the snapshot is created . Supported characters for the name are a - z , A - Z , 0-9 and _ . The max name length is 80 characters . * @ param snapshot Snapshot object supplied in the body of the Patch snapshot operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the SnapshotInner object */ public Observable < SnapshotInner > beginUpdateAsync ( String resourceGroupName , String snapshotName , SnapshotUpdate snapshot ) { } }
// Delegates to the ServiceResponse variant and unwraps the body, so callers
// get the SnapshotInner payload without the HTTP response envelope.
return beginUpdateWithServiceResponseAsync ( resourceGroupName , snapshotName , snapshot ) . map ( new Func1 < ServiceResponse < SnapshotInner > , SnapshotInner > ( ) { @ Override public SnapshotInner call ( ServiceResponse < SnapshotInner > response ) { return response . body ( ) ; } } ) ;
public class SoyFileSet { /** * Performs the minimal amount of work needed to calculate TemplateMetadata objects for header * compilation . */ ParseResult compileMinimallyForHeaders ( ) { } }
// Start from a clean error reporter so failures from earlier compilations
// do not leak into this run.
resetErrorReporter ( ) ;
// Run the pass pipeline only up to (and including) ResolveExpressionTypesPass:
// types on default parameter values are all that template metadata needs.
ParseResult result = parse ( passManagerBuilder ( ) // ResolveExpressionTypesPass resolve types ( specifically on default parameter
// values ) which is necessary for template metadatas .
. addPassContinuationRule ( ResolveExpressionTypesPass . class , PassContinuationRule . STOP_AFTER_PASS ) . allowV1Expression ( ) , typeRegistry ) ;
// Fail fast on errors; surface warnings either way.
throwIfErrorsPresent ( ) ; reportWarnings ( ) ; return result ;
public class AbstractArrayFieldValidator { /** * メッセージキーを指定して 、 エラー情報を追加します 。 * < p > エラーメッセージ中の変数は 、 { @ link # getMessageVariables ( ArrayCellField ) } の値を使用します 。 < / p > * @ param cellField フィールド情報 * @ param messageKey メッセージキー * @ throws IllegalArgumentException { @ literal cellField = = null or messageKey = = null } * @ throws IllegalArgumentException { @ literal messageKey . length ( ) = = 0} */ public void error ( final ArrayCellField < E > cellField , final String messageKey ) { }  }
// English gloss of the Javadoc above: adds an error with the given message
// key; message variables are taken from getMessageVariables(cellField).
// Delegates to the three-argument overload, which presumably performs the
// null/empty argument checks documented above — confirm in that overload.
error ( cellField , messageKey , getMessageVariables ( cellField ) ) ;
public class StandaloneXml_Legacy { /** * ManagamentXmlDelegate Methods */ @ Override public boolean parseManagementInterfaces ( XMLExtendedStreamReader reader , ModelNode address , List < ModelNode > operationsList ) throws XMLStreamException { } }
// Version-dispatch on the schema namespace: the 1.0 schema has its own
// parser; every later namespace uses the 1.1 parsing rules.
switch ( namespace ) { case DOMAIN_1_0 : parseManagementInterfaces_1_0 ( reader , address , operationsList ) ; break ; default : parseManagementInterfaces_1_1 ( reader , address , operationsList ) ; }
// Always reports the element as handled.
return true ;
public class IfcArbitraryProfileDefWithVoidsImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ SuppressWarnings ( "unchecked" ) public EList < IfcCurve > getInnerCurves ( ) { } }
// Generated EMF accessor: resolves the INNER_CURVES structural feature
// (second argument 'true' requests proxy resolution) and casts the result.
return ( EList < IfcCurve > ) eGet ( Ifc2x3tc1Package . Literals . IFC_ARBITRARY_PROFILE_DEF_WITH_VOIDS__INNER_CURVES , true ) ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link SurfacePropertyType } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link SurfacePropertyType } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "surfaceProperty" ) public JAXBElement < SurfacePropertyType > createSurfaceProperty ( SurfacePropertyType value ) { } }
// Generated JAXB factory method: wraps the value in a JAXBElement bound to
// the gml:surfaceProperty qualified name; 'null' is the (absent) scope class.
return new JAXBElement < SurfacePropertyType > ( _SurfaceProperty_QNAME , SurfacePropertyType . class , null , value ) ;
public class Transform { /** * Convert measurements to { @ link MeasurementDTO } s . * @ param measurements * measurements to convert . * @ return converted measurements . */ static Collection < Collection < MeasurementDTO > > measurementDTOs ( Results < Measurement > measurements ) { } }
// Outer transform maps each result row to the DTOs of its measurements;
// Lists.newArrayList materializes the outer collection eagerly, while the
// inner Collections2.transform presumably remains a lazy view over each
// row's elements — confirm against Guava's Collections2 documentation.
return Lists . newArrayList ( Iterables . transform ( measurements , new Function < Results . Row < Measurement > , Collection < MeasurementDTO > > ( ) { @ Override public Collection < MeasurementDTO > apply ( Row < Measurement > input ) { return Collections2 . transform ( input . getElements ( ) , new Function < Measurement , MeasurementDTO > ( ) { @ Override public MeasurementDTO apply ( Measurement input ) { return new MeasurementDTO ( input . getTimestamp ( ) . asMillis ( ) , new ResourceDTO ( input . getResource ( ) . getId ( ) , unwrapMap ( input . getResource ( ) . getAttributes ( ) ) ) , input . getName ( ) , input . getValue ( ) , input . getAttributes ( ) ) ; } } ) ; } } ) ) ;
public class Preconditions {
    /**
     * Checks the given boolean condition, and throws an {@code IllegalStateException}
     * if the condition is not met (evaluates to {@code false}). The exception will
     * have the given error message.
     *
     * @param condition The condition to check
     * @param errorMessage The message for the {@code IllegalStateException} that is
     *        thrown if the check fails; may be {@code null} (rendered as "null")
     * @throws IllegalStateException Thrown, if the condition is violated.
     */
    public static void checkState ( boolean condition , Object errorMessage ) {
        // Fast path: nothing to do when the state is valid.
        if ( condition ) {
            return ;
        }
        // String.valueOf renders a null message as "null", matching the contract.
        throw new IllegalStateException ( String . valueOf ( errorMessage ) ) ;
    }
}
public class BasicLogRecord { /** * Returns the next value of this log record . * @ param type * the expected type of the value * @ return the next value */ public Constant nextVal ( Type type ) { } }
Constant val = pg . getVal ( currentPos , type ) ; currentPos += Page . size ( val ) ; return val ;
public class PlanNode { /** * Get the keys for the property values that are set on this node . * @ return the property keys ; never null but possibly empty */ public Set < Property > getPropertyKeys ( ) { } }
return nodeProperties != null ? nodeProperties . keySet ( ) : Collections . < Property > emptySet ( ) ;
public class MemcachedClient { /** * Asynchronously get a bunch of objects from the cache . * @ param < T > * @ param keyIter Iterator for the keys to request * @ param tc the transcoder to serialize and unserialize values * @ return a Future result of that fetch * @ throws IllegalStateException in the rare circumstance where queue is too * full to accept any more requests */ @ Override public < T > BulkFuture < Map < String , T > > asyncGetBulk ( Iterator < String > keyIter , Transcoder < T > tc ) { } }
// Adapts the single transcoder to the per-key-transcoder overload by
// wrapping it in an infinite iterator that yields the same instance for
// every key.
return asyncGetBulk ( keyIter , new SingleElementInfiniteIterator < Transcoder < T > > ( tc ) ) ;
public class MethodId { /** * Returns a descriptor like " ( Ljava / lang / Class ; [ I ) Ljava / lang / Object ; " . */ String descriptor ( boolean includeThis ) { } }
StringBuilder result = new StringBuilder ( ) ; result . append ( "(" ) ; if ( includeThis ) { result . append ( declaringType . name ) ; } for ( TypeId t : parameters . types ) { result . append ( t . name ) ; } result . append ( ")" ) ; result . append ( returnType . name ) ; return result . toString ( ) ;