signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class DatabaseUtils { /** * Utility method to run the query on the db and return the value in the
* first column of the first row . */
public static long longForQuery ( SQLiteDatabase db , String query , String [ ] selectionArgs ) { } }
|
SQLiteStatement prog = db . compileStatement ( query ) ; try { return longForQuery ( prog , selectionArgs ) ; } finally { prog . close ( ) ; }
|
public class EllipseClustersIntoGrid { /** * Checks to see if any node is used more than once */
boolean checkDuplicates ( List < List < NodeInfo > > grid ) { } }
|
for ( int i = 0 ; i < listInfo . size ; i ++ ) { listInfo . get ( i ) . marked = false ; } for ( int i = 0 ; i < grid . size ( ) ; i ++ ) { List < NodeInfo > list = grid . get ( i ) ; for ( int j = 0 ; j < list . size ( ) ; j ++ ) { NodeInfo n = list . get ( j ) ; if ( n . marked ) return true ; n . marked = true ; } } return false ;
|
public class ConstantPool { /** * Adds a integer constant . */
public IntegerConstant addInteger ( int value ) { } }
|
IntegerConstant entry = getIntegerByValue ( value ) ; if ( entry != null ) return entry ; entry = new IntegerConstant ( this , _entries . size ( ) , value ) ; addConstant ( entry ) ; return entry ;
|
public class DateTimeExtensions { /** * Returns an equivalent instance of { @ link java . util . Calendar } .
* The time portion of the returned calendar is cleared and the time zone is the current system default .
* @ param self a LocalDate
* @ return a java . util . Calendar
* @ since 2.5.0 */
public static Calendar toCalendar ( final LocalDate self ) { } }
|
Calendar cal = Calendar . getInstance ( ) ; cal . set ( Calendar . DATE , self . getDayOfMonth ( ) ) ; cal . set ( Calendar . MONTH , self . getMonthValue ( ) - 1 ) ; cal . set ( Calendar . YEAR , self . getYear ( ) ) ; clearTimeCommon ( cal ) ; return cal ;
|
public class Math { /** * Returns the median absolute deviation ( MAD ) . Note that input array will
* be altered after the computation . MAD is a robust measure of
* the variability of a univariate sample of quantitative data . For a
* univariate data set X < sub > 1 < / sub > , X < sub > 2 < / sub > , . . . , X < sub > n < / sub > ,
* the MAD is defined as the median of the absolute deviations from the data ' s median :
* MAD ( X ) = median ( | X < sub > i < / sub > - median ( X < sub > i < / sub > ) | )
* that is , starting with the residuals ( deviations ) from the data ' s median ,
* the MAD is the median of their absolute values .
* MAD is a more robust estimator of scale than the sample variance or
* standard deviation . For instance , MAD is more resilient to outliers in
* a data set than the standard deviation . It thus behaves better with
* distributions without a mean or variance , such as the Cauchy distribution .
* In order to use the MAD as a consistent estimator for the estimation of
* the standard deviation & sigma ; , one takes & sigma ; = K * MAD , where K is
* a constant scale factor , which depends on the distribution . For normally
* distributed data K is taken to be 1.4826 . Other distributions behave
* differently : for example for large samples from a uniform continuous
* distribution , this factor is about 1.1547. */
public static double mad ( int [ ] x ) { } }
|
int m = median ( x ) ; for ( int i = 0 ; i < x . length ; i ++ ) { x [ i ] = Math . abs ( x [ i ] - m ) ; } return median ( x ) ;
|
public class AccountsInner { /** * Gets the specified Azure Storage account linked to the given Data Lake Analytics account .
* @ param resourceGroupName The name of the Azure resource group that contains the Data Lake Analytics account .
* @ param accountName The name of the Data Lake Analytics account from which to retrieve Azure storage account details .
* @ param storageAccountName The name of the Azure Storage account for which to retrieve the details .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the StorageAccountInfoInner object if successful . */
public StorageAccountInfoInner getStorageAccount ( String resourceGroupName , String accountName , String storageAccountName ) { } }
|
return getStorageAccountWithServiceResponseAsync ( resourceGroupName , accountName , storageAccountName ) . toBlocking ( ) . single ( ) . body ( ) ;
|
public class ReduceTaskStatus { /** * rate */
@ Override public double getCopyProcessingRate ( long currentTime ) { } }
|
@ SuppressWarnings ( "deprecation" ) long bytesCopied = super . getCounters ( ) . findCounter ( Task . Counter . REDUCE_SHUFFLE_BYTES ) . getCounter ( ) ; long timeSpentCopying = 0 ; long startTime = getStartTime ( ) ; if ( getPhase ( ) == Phase . SHUFFLE ) { if ( currentTime <= startTime ) { LOG . error ( "current time is " + currentTime + ", which is <= start " + "time " + startTime + " in " + this . getTaskID ( ) ) ; } timeSpentCopying = currentTime - startTime ; } else { // shuffle phase is done
long shuffleFinishTime = getShuffleFinishTime ( ) ; if ( shuffleFinishTime <= startTime ) { LOG . error ( "Shuffle finish time is " + shuffleFinishTime + ", which is <= start time " + startTime + " in " + this . getTaskID ( ) ) ; return 0 ; } timeSpentCopying = shuffleFinishTime - startTime ; } copyProcessingRate = bytesCopied / timeSpentCopying ; return copyProcessingRate ;
|
public class ns_config_diff { /** * < pre >
* Converts API response of bulk operation into object and returns the object array in case of get request .
* < / pre > */
protected base_resource [ ] get_nitro_bulk_response ( nitro_service service , String response ) throws Exception { } }
|
ns_config_diff_responses result = ( ns_config_diff_responses ) service . get_payload_formatter ( ) . string_to_resource ( ns_config_diff_responses . class , response ) ; if ( result . errorcode != 0 ) { if ( result . errorcode == SESSION_NOT_EXISTS ) service . clear_session ( ) ; throw new nitro_exception ( result . message , result . errorcode , ( base_response [ ] ) result . ns_config_diff_response_array ) ; } ns_config_diff [ ] result_ns_config_diff = new ns_config_diff [ result . ns_config_diff_response_array . length ] ; for ( int i = 0 ; i < result . ns_config_diff_response_array . length ; i ++ ) { result_ns_config_diff [ i ] = result . ns_config_diff_response_array [ i ] . ns_config_diff [ 0 ] ; } return result_ns_config_diff ;
|
public class UmaResourceRegistrationRequest { /** * As resource set .
* @ param profileResult the profile result
* @ return the resource set */
@ JsonIgnore public ResourceSet asResourceSet ( final CommonProfile profileResult ) { } }
|
val resourceSet = new ResourceSet ( ) ; resourceSet . setIconUri ( getIconUri ( ) ) ; resourceSet . setId ( getId ( ) ) ; resourceSet . setName ( getName ( ) ) ; resourceSet . setScopes ( new HashSet < > ( getScopes ( ) ) ) ; resourceSet . setUri ( getUri ( ) ) ; resourceSet . setType ( getType ( ) ) ; resourceSet . setOwner ( profileResult . getId ( ) ) ; resourceSet . setClientId ( OAuth20Utils . getClientIdFromAuthenticatedProfile ( profileResult ) ) ; return resourceSet ;
|
public class ListResourcesForTagOptionResult { /** * Information about the resources .
* @ param resourceDetails
* Information about the resources . */
public void setResourceDetails ( java . util . Collection < ResourceDetail > resourceDetails ) { } }
|
if ( resourceDetails == null ) { this . resourceDetails = null ; return ; } this . resourceDetails = new java . util . ArrayList < ResourceDetail > ( resourceDetails ) ;
|
public class RequestResponse { /** * HTTP request body .
* @ param requestData
* @ return */
public RequestResponse setRequestData ( Object requestData ) { } }
|
this . requestData = requestData ; requestJson = requestData != null ? ( requestData instanceof JsonNode ? ( JsonNode ) requestData : SerializationUtils . toJson ( requestData ) ) : null ; return this ;
|
public class OfflineDataUpload { /** * Gets the offlineDataList value for this OfflineDataUpload .
* @ return offlineDataList * List of offline data in this upload . For AdWords API , each
* offlineDataList can have at most 50
* OfflineData .
* < span class = " constraint Required " > This field is required
* and should not be { @ code null } when it is contained within { @ link
* Operator } s : ADD . < / span > */
public com . google . api . ads . adwords . axis . v201809 . rm . OfflineData [ ] getOfflineDataList ( ) { } }
|
return offlineDataList ;
|
public class HttpServiceTracker { /** * Http Service is down , remove my servlet . */
public void removeService ( ServiceReference reference , Object service ) { } }
|
if ( serviceRegistration != null ) { serviceRegistration . unregister ( ) ; serviceRegistration = null ; } String alias = this . getAlias ( ) ; ( ( HttpService ) service ) . unregister ( alias ) ; if ( servlet instanceof WebappServlet ) ( ( WebappServlet ) servlet ) . free ( ) ; servlet = null ;
|
public class AdapterUtil { /** * Retrieve the message corresponding to the supplied key from the IBMDataStoreAdapterNLS
* properties file . If the message cannot be found , the key is returned .
* @ param key a valid message key from IBMDataStoreAdapterNLS . properties .
* @ param args a list of parameters to include in the translatable message .
* @ return a translated message . */
public static final String getNLSMessage ( String key , Object ... args ) { } }
|
return Tr . formatMessage ( tc , key , args ) ;
|
public class TempStream { /** * Clean up the temp stream . */
@ Override public void destroy ( ) { } }
|
try { close ( ) ; } catch ( IOException e ) { } finally { TempBuffer ptr = _head ; _head = null ; _tail = null ; TempBuffer . freeAll ( ptr ) ; }
|
public class ListCustomVerificationEmailTemplatesResult { /** * A list of the custom verification email templates that exist in your account .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setCustomVerificationEmailTemplates ( java . util . Collection ) } or
* { @ link # withCustomVerificationEmailTemplates ( java . util . Collection ) } if you want to override the existing values .
* @ param customVerificationEmailTemplates
* A list of the custom verification email templates that exist in your account .
* @ return Returns a reference to this object so that method calls can be chained together . */
public ListCustomVerificationEmailTemplatesResult withCustomVerificationEmailTemplates ( CustomVerificationEmailTemplate ... customVerificationEmailTemplates ) { } }
|
if ( this . customVerificationEmailTemplates == null ) { setCustomVerificationEmailTemplates ( new com . amazonaws . internal . SdkInternalList < CustomVerificationEmailTemplate > ( customVerificationEmailTemplates . length ) ) ; } for ( CustomVerificationEmailTemplate ele : customVerificationEmailTemplates ) { this . customVerificationEmailTemplates . add ( ele ) ; } return this ;
|
public class TextUtils { /** * Debugging tool to print the spans in a CharSequence . The output will
* be printed one span per line . If the CharSequence is not a Spanned ,
* then the entire string will be printed on a single line . */
public static void dumpSpans ( CharSequence cs , Printer printer , String prefix ) { } }
|
if ( cs instanceof Spanned ) { Spanned sp = ( Spanned ) cs ; Object [ ] os = sp . getSpans ( 0 , cs . length ( ) , Object . class ) ; for ( int i = 0 ; i < os . length ; i ++ ) { Object o = os [ i ] ; printer . println ( prefix + cs . subSequence ( sp . getSpanStart ( o ) , sp . getSpanEnd ( o ) ) + ": " + Integer . toHexString ( System . identityHashCode ( o ) ) + " " + o . getClass ( ) . getCanonicalName ( ) + " (" + sp . getSpanStart ( o ) + "-" + sp . getSpanEnd ( o ) + ") fl=#" + sp . getSpanFlags ( o ) ) ; } } else { printer . println ( prefix + cs + ": (no spans)" ) ; }
|
public class EventStackHelper { /** * Checks whether the given Event should be prevented according to the given
* event stack . If so , the event ' s { @ link Event # isPrevented ( ) isPrevented }
* property is set to < code > true < / code > and a new { @ link SuppressedEvent } is
* added to the < em > preventing < / em > event .
* @ param < L > Type of the listener .
* @ param < E > Type of the event .
* @ param eventStack The event stack .
* @ param event The Event to check whether it is prevented .
* @ param bc Function to delegate the event to the specific callback method
* of the listener .
* @ param ec Callback to be notified when any of the listeners throws an
* exception .
* @ return Whether the event should be prevented . */
public static < L extends Listener , E extends Event < ? , L > > boolean checkPrevent ( EventStack eventStack , E event , BiConsumer < L , E > bc , ExceptionCallback ec ) { } }
|
// check if any of the currently dispatched events marked the target
// listener class to be prevented .
final Optional < SequentialEvent < ? , ? > > cause = eventStack . preventDispatch ( event . getListenerClass ( ) ) ; if ( cause . isPresent ( ) ) { event . setPrevented ( true ) ; cause . get ( ) . addSuppressedEvent ( new SuppressedEventImpl < L , E > ( event , ec , bc ) ) ; return true ; } return false ;
|
public class HttpRequest { /** * 将请求Header转换成Map
* @ param map Map
* @ return Map */
public Map < String , String > getHeadersToMap ( Map < String , String > map ) { } }
|
if ( map == null ) map = new LinkedHashMap < > ( ) ; final Map < String , String > map0 = map ; header . forEach ( ( k , v ) -> map0 . put ( k , v ) ) ; return map0 ;
|
public class SpringDataSourceBeanPostProcessor { /** * { @ inheritDoc } */
@ Override public Object postProcessAfterInitialization ( Object bean , String beanName ) { } }
|
if ( bean instanceof DataSource ) { // on ne teste isExcludedDataSource que si on est sur une datasource
if ( isExcludedDataSource ( beanName ) || Parameters . isNoDatabase ( ) || isDelegatingDataSourceAndAlreadyProxied ( bean , beanName ) ) { return bean ; } final DataSource dataSource = ( DataSource ) bean ; JdbcWrapper . registerSpringDataSource ( beanName , dataSource ) ; final DataSource result = JdbcWrapper . SINGLETON . createDataSourceProxy ( beanName , dataSource ) ; LOG . debug ( "Spring datasource wrapped: " + beanName ) ; return result ; } else if ( bean instanceof JndiObjectFactoryBean ) { // ou sur un JndiObjectFactoryBean
if ( isExcludedDataSource ( beanName ) || Parameters . isNoDatabase ( ) ) { return bean ; } // fix issue 20
final Object result = createProxy ( bean , beanName ) ; LOG . debug ( "Spring JNDI factory wrapped: " + beanName ) ; return result ; } // I tried here in the post - processor to fix " quartz jobs which are scheduled with spring
// are not displayed in javamelody , except if there is the following property for
// SchedulerFactoryBean in spring xml :
// < property name = " exposeSchedulerInRepository " value = " true " / > " ,
// but I had some problem with Spring creating the scheduler
// twice and so registering the scheduler in SchedulerRepository with the same name
// as the one registered below ( and Quartz wants not )
// else if ( bean ! = null
// & & " org . springframework . scheduling . quartz . SchedulerFactoryBean " . equals ( bean
// . getClass ( ) . getName ( ) ) ) {
// try {
// / / Remarque : on ajoute nous même le scheduler de Spring dans le SchedulerRepository
// / / de Quartz , car l ' appel ici de schedulerFactoryBean . setExposeSchedulerInRepository ( true )
// / / est trop tard et ne fonctionnerait pas
// final Method method = bean . getClass ( ) . getMethod ( " getScheduler " , ( Class < ? > [ ] ) null ) ;
// final Scheduler scheduler = ( Scheduler ) method . invoke ( bean , ( Object [ ] ) null ) ;
// final SchedulerRepository schedulerRepository = SchedulerRepository . getInstance ( ) ;
// synchronized ( schedulerRepository ) {
// if ( schedulerRepository . lookup ( scheduler . getSchedulerName ( ) ) = = null ) {
// schedulerRepository . bind ( scheduler ) ;
// scheduler . addGlobalJobListener ( new JobGlobalListener ( ) ) ;
// } catch ( final NoSuchMethodException e ) {
// / / si la méthode n ' existe pas ( avant spring 2.5.6 ) , alors cela marche sans rien faire
// return bean ;
// } catch ( final InvocationTargetException e ) {
// / / tant pis
// return bean ;
// } catch ( final IllegalAccessException e ) {
// / / tant pis
// return bean ;
// } catch ( SchedulerException e ) {
// / / tant pis
// return bean ;
return bean ;
|
public class Probability { /** * This function returns the logical exclusive disjunction of the specified probabilities . The value
* of the logical exclusive disjunction of two probabilities is sans ( P , Q ) + sans ( Q , P ) .
* @ param probability1 The first probability .
* @ param probability2 The second probability .
* @ return The logical exclusive disjunction of the two probabilities . */
static public Probability xor ( Probability probability1 , Probability probability2 ) { } }
|
double p1 = probability1 . value ; double p2 = probability2 . value ; return new Probability ( p1 * ( 1.0d - p2 ) + p2 * ( 1.0d - p1 ) ) ;
|
public class ElasticPoolActivitiesInner { /** * Returns elastic pool activities .
* @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
* @ param serverName The name of the server .
* @ param elasticPoolName The name of the elastic pool for which to get the current activity .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the List & lt ; ElasticPoolActivityInner & gt ; object */
public Observable < ServiceResponse < List < ElasticPoolActivityInner > > > listByElasticPoolWithServiceResponseAsync ( String resourceGroupName , String serverName , String elasticPoolName ) { } }
|
if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( serverName == null ) { throw new IllegalArgumentException ( "Parameter serverName is required and cannot be null." ) ; } if ( elasticPoolName == null ) { throw new IllegalArgumentException ( "Parameter elasticPoolName is required and cannot be null." ) ; } if ( this . client . apiVersion ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiVersion() is required and cannot be null." ) ; } return service . listByElasticPool ( this . client . subscriptionId ( ) , resourceGroupName , serverName , elasticPoolName , this . client . apiVersion ( ) , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < List < ElasticPoolActivityInner > > > > ( ) { @ Override public Observable < ServiceResponse < List < ElasticPoolActivityInner > > > call ( Response < ResponseBody > response ) { try { ServiceResponse < PageImpl < ElasticPoolActivityInner > > result = listByElasticPoolDelegate ( response ) ; List < ElasticPoolActivityInner > items = null ; if ( result . body ( ) != null ) { items = result . body ( ) . items ( ) ; } ServiceResponse < List < ElasticPoolActivityInner > > clientResponse = new ServiceResponse < List < ElasticPoolActivityInner > > ( items , result . response ( ) ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
|
public class Descriptives { /** * Calculates Harmonic Mean
* @ param flatDataCollection
* @ return */
public static double harmonicMean ( FlatDataCollection flatDataCollection ) { } }
|
int n = 0 ; double harmonicMean = 0.0 ; Iterator < Double > it = flatDataCollection . iteratorDouble ( ) ; while ( it . hasNext ( ) ) { Double v = it . next ( ) ; if ( v != null ) { ++ n ; harmonicMean += 1.0 / v ; } } harmonicMean = n / harmonicMean ; return harmonicMean ;
|
public class StyleUtil { /** * 设置cell的四个边框粗细和颜色
* @ param cellStyle { @ link CellStyle }
* @ param borderSize 边框粗细 { @ link BorderStyle } 枚举
* @ param colorIndex 颜色的short值
* @ return { @ link CellStyle } */
public static CellStyle setBorder ( CellStyle cellStyle , BorderStyle borderSize , IndexedColors colorIndex ) { } }
|
cellStyle . setBorderBottom ( borderSize ) ; cellStyle . setBottomBorderColor ( colorIndex . index ) ; cellStyle . setBorderLeft ( borderSize ) ; cellStyle . setLeftBorderColor ( colorIndex . index ) ; cellStyle . setBorderRight ( borderSize ) ; cellStyle . setRightBorderColor ( colorIndex . index ) ; cellStyle . setBorderTop ( borderSize ) ; cellStyle . setTopBorderColor ( colorIndex . index ) ; return cellStyle ;
|
public class MailContent { /** * Adds a new row with three columns .
* @ param firstCell the first cell text content
* @ param secondCell the second cell text content
* @ param thirdCell the third cell text content
* @ return the changed mail content object */
public MailContent row ( String firstCell , String secondCell , String thirdCell ) { } }
|
return tag ( "tr" ) . cell ( firstCell ) . cell ( secondCell ) . cell ( thirdCell ) . end ( ) ;
|
public class DateTimeZone { /** * Adjusts the offset to be the earlier or later one during an overlap .
* @ param instant the instant to adjust
* @ param earlierOrLater false for earlier , true for later
* @ return the adjusted instant millis */
public long adjustOffset ( long instant , boolean earlierOrLater ) { } }
|
// a bit messy , but will work in all non - pathological cases
// evaluate 3 hours before and after to work out if anything is happening
long instantBefore = instant - 3 * DateTimeConstants . MILLIS_PER_HOUR ; long instantAfter = instant + 3 * DateTimeConstants . MILLIS_PER_HOUR ; long offsetBefore = getOffset ( instantBefore ) ; long offsetAfter = getOffset ( instantAfter ) ; if ( offsetBefore <= offsetAfter ) { return instant ; // not an overlap ( less than is a gap , equal is normal case )
} // work out range of instants that have duplicate local times
long diff = offsetBefore - offsetAfter ; long transition = nextTransition ( instantBefore ) ; long overlapStart = transition - diff ; long overlapEnd = transition + diff ; if ( instant < overlapStart || instant >= overlapEnd ) { return instant ; // not an overlap
} // calculate result
long afterStart = instant - overlapStart ; if ( afterStart >= diff ) { // currently in later offset
return earlierOrLater ? instant : instant - diff ; } else { // currently in earlier offset
return earlierOrLater ? instant + diff : instant ; }
|
public class CanonicalXML { /** * Receive notification of the end of an element .
* < p > By default , do nothing . Application writers may override this
* method in a subclass to take specific actions at the end of
* each element ( such as finalising a tree node or writing
* output to a file ) . < / p >
* @ param uri The Namespace URI , or the empty string if the
* element has no Namespace URI or if Namespace
* processing is not being performed .
* @ param localName The local name ( without prefix ) , or the
* empty string if Namespace processing is not being
* performed .
* @ param qName The qualified name ( with prefix ) , or the
* empty string if qualified names are not available .
* @ throws org . xml . sax . SAXException Any SAX exception , possibly
* wrapping another exception .
* @ see org . xml . sax . ContentHandler # endElement */
@ Override public void endElement ( String uri , String localName , String qName ) throws SAXException { } }
|
flushChars ( ) ; write ( "</" ) ; write ( qName ) ; write ( ">" ) ;
|
public class SendPasswordResetInstructionsAction { /** * Utility method to generate a password reset URL .
* @ param username username
* @ param passwordManagementService passwordManagementService
* @ param casProperties casProperties
* @ return URL a user can use to start the password reset process */
public String buildPasswordResetUrl ( final String username , final PasswordManagementService passwordManagementService , final CasConfigurationProperties casProperties ) { } }
|
val token = passwordManagementService . createToken ( username ) ; if ( StringUtils . isNotBlank ( token ) ) { val transientFactory = ( TransientSessionTicketFactory ) this . ticketFactory . get ( TransientSessionTicket . class ) ; val serverPrefix = casProperties . getServer ( ) . getPrefix ( ) ; val service = webApplicationServiceFactory . createService ( serverPrefix ) ; val properties = CollectionUtils . < String , Serializable > wrap ( PasswordManagementWebflowUtils . FLOWSCOPE_PARAMETER_NAME_TOKEN , token ) ; val ticket = transientFactory . create ( service , properties ) ; this . ticketRegistry . addTicket ( ticket ) ; return serverPrefix . concat ( '/' + CasWebflowConfigurer . FLOW_ID_LOGIN + '?' + PasswordManagementWebflowUtils . REQUEST_PARAMETER_NAME_PASSWORD_RESET_TOKEN + '=' ) . concat ( ticket . getId ( ) ) ; } LOGGER . error ( "Could not create password reset url since no reset token could be generated" ) ; return null ;
|
public class MtasSolrComponentTermvector { /** * Distributed process missing key .
* @ param rb the rb
* @ param mtasFields the mtas fields
* @ throws IOException Signals that an I / O exception has occurred . */
private void distributedProcessMissingKey ( ResponseBuilder rb , ComponentFields mtasFields ) throws IOException { } }
|
HashMap < String , HashMap < String , HashSet < String > > > missingTermvectorKeys = computeMissingTermvectorItemsPerShard ( rb . finished , "mtas" , NAME ) ; for ( Entry < String , HashMap < String , HashSet < String > > > entry : missingTermvectorKeys . entrySet ( ) ) { HashMap < String , HashSet < String > > missingTermvectorKeysShard = entry . getValue ( ) ; ModifiableSolrParams paramsNewRequest = new ModifiableSolrParams ( ) ; int termvectorCounter = 0 ; for ( String field : mtasFields . list . keySet ( ) ) { List < ComponentTermVector > tvList = mtasFields . list . get ( field ) . termVectorList ; if ( tvList != null ) { for ( ComponentTermVector tv : tvList ) { if ( ! tv . full ) { if ( missingTermvectorKeysShard . containsKey ( tv . key ) ) { HashSet < String > list = missingTermvectorKeysShard . get ( tv . key ) ; if ( ! list . isEmpty ( ) ) { paramsNewRequest . add ( PARAM_MTAS_TERMVECTOR + "." + termvectorCounter + "." + NAME_MTAS_TERMVECTOR_FIELD , field ) ; paramsNewRequest . add ( PARAM_MTAS_TERMVECTOR + "." + termvectorCounter + "." + NAME_MTAS_TERMVECTOR_PREFIX , tv . prefix ) ; paramsNewRequest . add ( PARAM_MTAS_TERMVECTOR + "." + termvectorCounter + "." + NAME_MTAS_TERMVECTOR_KEY , tv . key ) ; if ( tv . subComponentFunction . type != null ) { paramsNewRequest . add ( PARAM_MTAS_TERMVECTOR + "." + termvectorCounter + "." + NAME_MTAS_TERMVECTOR_TYPE , tv . subComponentFunction . type ) ; } if ( tv . distances != null ) { int distanceCounter = 0 ; for ( SubComponentDistance distance : tv . distances ) { paramsNewRequest . add ( PARAM_MTAS_TERMVECTOR + "." + termvectorCounter + "." + NAME_MTAS_TERMVECTOR_DISTANCE + "." + distanceCounter + "." + NAME_MTAS_TERMVECTOR_DISTANCE_TYPE , distance . type ) ; paramsNewRequest . add ( PARAM_MTAS_TERMVECTOR + "." + termvectorCounter + "." + NAME_MTAS_TERMVECTOR_DISTANCE + "." + distanceCounter + "." + NAME_MTAS_TERMVECTOR_DISTANCE_BASE , distance . base ) ; if ( distance . key != null ) { paramsNewRequest . 
add ( PARAM_MTAS_TERMVECTOR + "." + termvectorCounter + "." + NAME_MTAS_TERMVECTOR_DISTANCE + "." + distanceCounter + "." + NAME_MTAS_TERMVECTOR_DISTANCE_KEY , distance . key ) ; } if ( distance . minimum != null ) { paramsNewRequest . add ( PARAM_MTAS_TERMVECTOR + "." + termvectorCounter + "." + NAME_MTAS_TERMVECTOR_DISTANCE + "." + distanceCounter + "." + NAME_MTAS_TERMVECTOR_DISTANCE_MINIMUM , String . valueOf ( distance . minimum ) ) ; } if ( distance . maximum != null ) { paramsNewRequest . add ( PARAM_MTAS_TERMVECTOR + "." + termvectorCounter + "." + NAME_MTAS_TERMVECTOR_DISTANCE + "." + distanceCounter + "." + NAME_MTAS_TERMVECTOR_DISTANCE_MAXIMUM , String . valueOf ( distance . maximum ) ) ; } if ( distance . parameters != null ) { for ( Entry < String , String > parameter : distance . parameters . entrySet ( ) ) { paramsNewRequest . add ( PARAM_MTAS_TERMVECTOR + "." + termvectorCounter + "." + NAME_MTAS_TERMVECTOR_DISTANCE + "." + distanceCounter + "." + NAME_MTAS_TERMVECTOR_DISTANCE_PARAMETER + "." + parameter . getKey ( ) , parameter . getValue ( ) ) ; } } distanceCounter ++ ; } } if ( tv . functions != null ) { int functionCounter = 0 ; for ( SubComponentFunction function : tv . functions ) { paramsNewRequest . add ( PARAM_MTAS_TERMVECTOR + "." + termvectorCounter + "." + NAME_MTAS_TERMVECTOR_FUNCTION + "." + functionCounter + "." + NAME_MTAS_TERMVECTOR_FUNCTION_EXPRESSION , function . expression ) ; paramsNewRequest . add ( PARAM_MTAS_TERMVECTOR + "." + termvectorCounter + "." + NAME_MTAS_TERMVECTOR_FUNCTION + "." + functionCounter + "." + NAME_MTAS_TERMVECTOR_FUNCTION_KEY , function . key ) ; if ( function . type != null ) { paramsNewRequest . add ( PARAM_MTAS_TERMVECTOR + "." + termvectorCounter + "." + NAME_MTAS_TERMVECTOR_FUNCTION + "." + functionCounter + "." + NAME_MTAS_TERMVECTOR_FUNCTION_TYPE , function . type ) ; } functionCounter ++ ; } } if ( tv . regexp != null ) { paramsNewRequest . add ( PARAM_MTAS_TERMVECTOR + "." 
+ termvectorCounter + "." + NAME_MTAS_TERMVECTOR_REGEXP , tv . regexp ) ; } if ( ! list . isEmpty ( ) ) { StringBuilder listValue = new StringBuilder ( ) ; String [ ] listList = list . toArray ( new String [ list . size ( ) ] ) ; for ( int i = 0 ; i < listList . length ; i ++ ) { if ( i > 0 ) { listValue . append ( "," ) ; } listValue . append ( listList [ i ] . replace ( "\\" , "\\\\" ) . replace ( "," , "\\\\" ) ) ; } paramsNewRequest . add ( PARAM_MTAS_TERMVECTOR + "." + termvectorCounter + "." + NAME_MTAS_TERMVECTOR_FULL , "false" ) ; paramsNewRequest . add ( PARAM_MTAS_TERMVECTOR + "." + termvectorCounter + "." + NAME_MTAS_TERMVECTOR_LIST , listValue . toString ( ) ) ; } termvectorCounter ++ ; } } } } if ( termvectorCounter > 0 ) { ShardRequest nsreq = new ShardRequest ( ) ; nsreq . shards = new String [ ] { entry . getKey ( ) } ; nsreq . purpose = ShardRequest . PURPOSE_PRIVATE ; nsreq . params = new ModifiableSolrParams ( ) ; nsreq . params . add ( CommonParams . FQ , rb . req . getParams ( ) . getParams ( CommonParams . FQ ) ) ; nsreq . params . add ( CommonParams . Q , rb . req . getParams ( ) . getParams ( CommonParams . Q ) ) ; nsreq . params . add ( CommonParams . CACHE , rb . req . getParams ( ) . getParams ( CommonParams . CACHE ) ) ; nsreq . params . add ( CommonParams . ROWS , "0" ) ; nsreq . params . add ( MtasSolrSearchComponent . PARAM_MTAS , rb . req . getOriginalParams ( ) . getParams ( MtasSolrSearchComponent . PARAM_MTAS ) ) ; nsreq . params . add ( PARAM_MTAS_TERMVECTOR , rb . req . getOriginalParams ( ) . getParams ( PARAM_MTAS_TERMVECTOR ) ) ; nsreq . params . add ( paramsNewRequest ) ; rb . addRequest ( searchComponent , nsreq ) ; } } } }
|
public class MapFileGenerator { /** * Generate an IKVM map file .
* @ param mapFileName map file name
* @ param jarFile jar file containing code to be mapped
* @ param mapClassMethods true if we want to produce . Net style class method names
* @ throws IOException
* @ throws XMLStreamException
* @ throws ClassNotFoundException
* @ throws IntrospectionException */
private void writeMapFile ( String mapFileName , File jarFile , boolean mapClassMethods ) throws IOException , XMLStreamException , ClassNotFoundException , IntrospectionException { } }
|
FileWriter fw = new FileWriter ( mapFileName ) ; XMLOutputFactory xof = XMLOutputFactory . newInstance ( ) ; XMLStreamWriter writer = xof . createXMLStreamWriter ( fw ) ; // XMLStreamWriter writer = new IndentingXMLStreamWriter ( xof . createXMLStreamWriter ( fw ) ) ;
writer . writeStartDocument ( ) ; writer . writeStartElement ( "root" ) ; writer . writeStartElement ( "assembly" ) ; addClasses ( writer , jarFile , mapClassMethods ) ; writer . writeEndElement ( ) ; writer . writeEndElement ( ) ; writer . writeEndDocument ( ) ; writer . flush ( ) ; writer . close ( ) ; fw . flush ( ) ; fw . close ( ) ;
|
public class Files { /** * Reads all of the lines from a file . The lines do not include line - termination characters , but
* do include other leading and trailing whitespace .
* < p > This method returns a mutable { @ code List } . For an { @ code ImmutableList } , use
* { @ code Files . asCharSource ( file , charset ) . readLines ( ) } .
* @ param file the file to read from
* @ param charset the charset used to decode the input stream ; see { @ link StandardCharsets } for
* helpful predefined constants
* @ return a mutable { @ link List } containing all the lines
* @ throws IOException if an I / O error occurs */
public static List < String > readLines ( File file , Charset charset ) throws IOException { } }
|
// don ' t use asCharSource ( file , charset ) . readLines ( ) because that returns
// an immutable list , which would change the behavior of this method
return readLines ( file , charset , new LineProcessor < List < String > > ( ) { final List < String > result = Lists . newArrayList ( ) ; @ Override public boolean processLine ( String line ) { result . add ( line ) ; return true ; } @ Override public List < String > getResult ( ) { return result ; } } ) ;
|
public class BaseTypeSignature { /** * / * ( non - Javadoc )
* @ see io . github . classgraph . ScanResultObject # loadClass ( java . lang . Class ) */
@ Override < T > Class < T > loadClass ( final Class < T > superclassOrInterfaceType ) { } }
|
final Class < ? > type = getType ( ) ; if ( ! superclassOrInterfaceType . isAssignableFrom ( type ) ) { throw new IllegalArgumentException ( "Primitive class " + baseType + " cannot be cast to " + superclassOrInterfaceType . getName ( ) ) ; } @ SuppressWarnings ( "unchecked" ) final Class < T > classT = ( Class < T > ) type ; return classT ;
|
public class AOStream {
    /**
     * Helper function. Writes ranges of completed ticks and updates them to include the expanded range.
     * Called from within a synchronized (this) block.
     *
     * @param completedRanges List of TickRange objects representing ranges to write
     */
    private final void doWriteCompletedRanges(List completedRanges) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "doWriteCompletedRanges", new Object[] { completedRanges });
        int length = completedRanges.size();
        for (int i = 0; i < length; i++) {
            // Replace each entry in place with the (possibly expanded) range the stream returns.
            completedRanges.set(i, stream.writeCompletedRange((TickRange) completedRanges.get(i)));
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "doWriteCompletedRanges");
    }
}
|
public class TaskDataImpl { /** * Removes the Attachment specified by the attachmentId .
* @ param attachmentId id of attachment to remove
* @ return removed Attachment or null if one was not found with the id */
public Attachment removeAttachment ( final long attachmentId ) { } }
|
Attachment removedAttachment = null ; if ( attachments != null ) { for ( int index = attachments . size ( ) - 1 ; index >= 0 ; -- index ) { Attachment currentAttachment = attachments . get ( index ) ; if ( currentAttachment . getId ( ) == attachmentId ) { removedAttachment = attachments . remove ( index ) ; break ; } } } return removedAttachment ;
|
public class DateUtils { /** * Roll the java . util . Time forward or backward .
* @ param startDate - The start date
* @ param period Calendar . YEAR etc
* @ param amount - Negative to rollbackwards . */
public static java . sql . Time rollTime ( java . util . Date startDate , int period , int amount ) { } }
|
GregorianCalendar gc = new GregorianCalendar ( ) ; gc . setTime ( startDate ) ; gc . add ( period , amount ) ; return new java . sql . Time ( gc . getTime ( ) . getTime ( ) ) ;
|
public class RETemplateGroup { /** * 得到单个文件的模板
* @ param fileName
* Jul 16 , 2009 */
public void load ( String fileName ) { } }
|
try { InputStreamReader read = new InputStreamReader ( new FileInputStream ( fileName ) , "utf-8" ) ; BufferedReader bin = new BufferedReader ( read ) ; RETemplate qt ; qt = new RETemplate ( ) ; qt . comment = fileName ; StringBuilder sb ; String line ; // if ( fileName . contains ( " 歌曲 - 无修饰推荐 " ) )
// errorLogger . debug ( " " ) ;
// 读入前缀 、 后缀
String prefix = "" ; String suffix = "" ; while ( ( line = bin . readLine ( ) ) != null ) { if ( line . length ( ) == 0 ) break ; if ( line . charAt ( 0 ) == '@' ) { if ( line . substring ( 1 , 7 ) . compareTo ( "PREFIX" ) == 0 ) prefix = line . substring ( 8 ) ; if ( line . substring ( 1 , 7 ) . compareTo ( "SUFFIX" ) == 0 ) suffix = line . substring ( 8 ) ; } } // 读入模板
while ( ( line = bin . readLine ( ) ) != null ) { if ( line . length ( ) == 0 ) break ; line = prefix + line + suffix ; try { qt . addTemplate ( line , 1 ) ; count ++ ; } catch ( Exception e ) { System . out . println ( fileName ) ; continue ; } } group . add ( qt ) ; } catch ( Exception e1 ) { e1 . printStackTrace ( ) ; }
|
public class TaskTracker {
    /**
     * The server retry loop.
     * This while-loop attempts to connect to the JobTracker, retrying on network
     * errors and shutting down cleanly when the JobTracker rejects this tracker.
     */
    public void run() {
        try {
            startCleanupThreads();
            try {
                // This while-loop attempts reconnects if we get network errors
                while (running && !shuttingDown) {
                    try {
                        State osState = offerService();
                        if (osState == State.STALE || osState == State.DENIED) {
                            // Shutdown TaskTracker instead of reinitialize.
                            // TaskTracker should be restarted by external tools.
                            LOG.error("offerService returns " + osState + ". Shutdown. ");
                            break;
                        }
                    } catch (Exception ex) {
                        if (!shuttingDown) {
                            LOG.info("Lost connection to JobTracker [" + jobTrackAddr + "]. Retrying...", ex);
                            try {
                                // Back off before the next reconnect attempt.
                                Thread.sleep(5000);
                            } catch (InterruptedException ie) {
                                // Ignored: the loop condition re-checks running/shuttingDown.
                            }
                        }
                    }
                }
            } finally {
                // Always shut down, whether the loop exited normally or via break.
                shutdown();
            }
        } catch (IOException iex) {
            LOG.error("Got fatal exception while initializing TaskTracker", iex);
            return;
        }
    }
}
|
public class CPDImpl {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        // EMF-generated reflective accessor: map each known feature ID to its
        // getter, delegating unknown IDs to the superclass.
        switch (featureID) {
            case AfplibPackage.CPD__CP_DESC: return getCPDesc();
            case AfplibPackage.CPD__GCGID_LEN: return getGCGIDLen();
            case AfplibPackage.CPD__NUM_CD_PTS: return getNumCdPts();
            case AfplibPackage.CPD__GCSGID: return getGCSGID();
            case AfplibPackage.CPD__CPGID: return getCPGID();
            case AfplibPackage.CPD__ENC_SCHEME: return getEncScheme();
        }
        return super.eGet(featureID, resolve, coreType);
    }
}
|
public class AbstractAuthLogicHandler { /** * Sends an HTTP request .
* @ param nextFilter the next filter
* @ param request the request to write
* @ throws ProxyAuthException */
protected void writeRequest ( final NextFilter nextFilter , final HttpProxyRequest request ) throws ProxyAuthException { } }
|
logger . debug ( " sending HTTP request" ) ; ( ( AbstractHttpLogicHandler ) proxyIoSession . getHandler ( ) ) . writeRequest ( nextFilter , request ) ;
|
public class JDBC4CallableStatement {
    /**
     * Sets the designated parameter to the given input stream.
     * Not supported by this driver: after validating the statement is still open,
     * this always throws the driver's "not supported" SQLException.
     */
    @Override
    public void setAsciiStream(String parameterName, InputStream x) throws SQLException {
        checkClosed();
        throw SQLError.noSupport();
    }
}
|
public class ContentCryptoMaterial { /** * Decrypts the secured CEK via KMS ; involves network calls .
* @ return the CEK ( in plaintext ) . */
private static SecretKey cekByKMS ( byte [ ] cekSecured , String keyWrapAlgo , EncryptionMaterials materials , ContentCryptoScheme contentCryptoScheme , AWSKMS kms ) { } }
|
DecryptRequest kmsreq = new DecryptRequest ( ) . withEncryptionContext ( materials . getMaterialsDescription ( ) ) . withCiphertextBlob ( ByteBuffer . wrap ( cekSecured ) ) ; DecryptResult result = kms . decrypt ( kmsreq ) ; return new SecretKeySpec ( copyAllBytesFrom ( result . getPlaintext ( ) ) , contentCryptoScheme . getKeyGeneratorAlgorithm ( ) ) ;
|
public class DwgFile { /** * TODO : Gesti � n de capas pendiente . . . */
public String getLayerName ( DwgObject entity ) { } }
|
String layerName = "" ; int layer = entity . getLayerHandle ( ) ; for ( int j = 0 ; j < layerTable . size ( ) ; j ++ ) { Vector layerTableRecord = ( Vector ) layerTable . get ( j ) ; int lHandle = ( ( Integer ) layerTableRecord . get ( 0 ) ) . intValue ( ) ; if ( lHandle == layer ) { layerName = ( String ) layerTableRecord . get ( 1 ) ; } } /* * workaround for the cases in which the entity
* can ' t define it ' s own layer name : assign all the
* objects to the layer 0 */
if ( layerName . equals ( "" ) ) return "0" ; return layerName ;
|
public class WriterFactoryImpl { /** * { @ inheritDoc } */
public AnnotationTypeRequiredMemberWriter getAnnotationTypeRequiredMemberWriter ( AnnotationTypeWriter annotationTypeWriter ) throws Exception { } }
|
return new AnnotationTypeRequiredMemberWriterImpl ( ( SubWriterHolderWriter ) annotationTypeWriter , annotationTypeWriter . getAnnotationTypeDoc ( ) ) ;
|
public class UtilizationCollectorCached {
    /**
     * Make connection to the Collector.
     * On failure, logs the error and returns with {@code rpcCollector} unset;
     * the caller is expected to retry later.
     */
    protected void connect() {
        LOG.info("Connecting to collector...");
        try {
            // Run the RPC proxy under a fixed "hadoop"/"hadoop" UGI.
            conf.setStrings(UnixUserGroupInformation.UGI_PROPERTY_NAME, new String[] { "hadoop", "hadoop" });
            rpcCollector = (UtilizationCollectorProtocol) RPC.getProxy(UtilizationCollectorProtocol.class,
                    UtilizationCollectorProtocol.versionID, UtilizationCollector.getAddress(conf), conf);
        } catch (IOException e) {
            LOG.error("Cannot connect to UtilizationCollector server. Retry in "
                    + DEFAULT_MIRROR_PERIOD + " milliseconds.");
            return;
        }
        LOG.info("Connection established");
    }
}
|
public class MfpThreadDataImpl {
    /**
     * Set the 'protocol version' of the connection partner for any current encode
     * on this thread. The value is a Comparable, which is actually an instance
     * of com.ibm.ws.sib.comms.ProtocolVersion.
     * This method has package access as it is called by JsMessageObject.
     */
    static void setPartnerLevel(Comparable level) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "setPartnerLevel", level);
        // Store the partner version in the thread-local for the current encode.
        partnerLevel.set(level);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "setPartnerLevel");
    }
}
|
public class FTPClient { /** * implement GridFTP v2 SCKS command as described in
* < a href = " http : / / www . ogf . org / documents / GFD . 47 . pdf " > GridFTP v2 Protocol Description < / a >
* < pre >
* 5.2 SCKS
* This command is sent prior to upload command such as STOR , ESTO , PUT . It is used
* to convey to the server that the checksum value for the file which is about to be
* uploaded . At the end of transfer , server will calculate checksum for the received file ,
* and if it does not match , will consider the transfer to have failed . Syntax of the
* command is :
* SCKS & lt ; algorithm & gt ; & lt ; value & gt ; CRLF
* Actual format of checksum value depends on the algorithm used , but generally ,
* hexadecimal representation should be used .
* < / pre >
* @ param algorithm
* @ param value
* @ throws org . globus . ftp . exception . ClientException
* @ throws org . globus . ftp . exception . ServerException
* @ throws java . io . IOException */
public void setChecksum ( String algorithm , String value ) throws ClientException , ServerException , IOException { } }
|
// check if we the cksum commands and specific algorithm are supported
checkCksumSupport ( algorithm ) ; // form CKSM command
String parameters = String . format ( "%s %s" , algorithm , value ) ; Command cmd = new Command ( "SCKS" , parameters ) ; // transfer command , obtain reply
Reply cksumReply = doCommand ( cmd ) ; // check for error
if ( ! Reply . isPositiveCompletion ( cksumReply ) ) { throw new ServerException ( ServerException . SERVER_REFUSED , cksumReply . getMessage ( ) ) ; } return ;
|
public class Log4JLogger { /** * from interface Logger . Factory */
public Logger getLogger ( String name ) { } }
|
return new Impl ( org . apache . log4j . Logger . getLogger ( name ) ) ;
|
public class ReflectionHelper {
    /**
     * Returns a default value for the given class type.
     *
     * @param cl the class to produce a default value for
     * @return an empty array for array types, the mapped default for primitives
     *         (or types registered in primitiveValueMap), otherwise a new instance
     */
    public static Object getDefaultValue(Class cl) {
        if (cl.isArray()) {
            // Array type: return an empty array of the component type.
            return Array.newInstance(cl.getComponentType(), 0);
        } else if (cl.isPrimitive() || primitiveValueMap.containsKey(cl)) {
            // Primitive (or a type registered in the map): return the mapped default.
            return primitiveValueMap.get(cl);
        } else {
            // Anything else: construct a fresh instance.
            return newInstance(cl);
        }
    }
}
|
public class FnInteger {
    /**
     * Determines whether the target object and the specified object are NOT equal
     * by calling the <tt>equals</tt> method on the target object.
     *
     * @param object the {@link Integer} to compare to the target
     * @return false if both objects are equal, true if not.
     */
    public static final Function<Integer, Boolean> notEq(final Integer object) {
        // Reuse the generic FnObject.notEq and narrow the function's input type
        // via a double (raw) cast; the function only ever calls equals().
        return (Function<Integer, Boolean>) ((Function) FnObject.notEq(object));
    }
}
|
public class AppsImpl {
    /**
     * Gets all the available custom prebuilt domains for all cultures.
     * Delegates to the ServiceResponse-producing async variant and adapts it to
     * a ServiceFuture, notifying the given callback on completion.
     *
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<PrebuiltDomain>> listAvailableCustomPrebuiltDomainsAsync(
            final ServiceCallback<List<PrebuiltDomain>> serviceCallback) {
        return ServiceFuture.fromResponse(listAvailableCustomPrebuiltDomainsWithServiceResponseAsync(), serviceCallback);
    }
}
|
public class MapTileTransitionModel { /** * Update tile .
* @ param resolved The resolved tiles .
* @ param toResolve Tiles to resolve after .
* @ param tile The tile reference .
* @ param ox The horizontal offset to update .
* @ param oy The vertical offset to update . */
private void updateTile ( Collection < Tile > resolved , Collection < Tile > toResolve , Tile tile , int ox , int oy ) { } }
|
final int tx = tile . getInTileX ( ) ; final int ty = tile . getInTileY ( ) ; final Tile neighbor = map . getTile ( tx + ox , ty + oy ) ; if ( neighbor != null ) { updateNeigbor ( resolved , toResolve , tile , neighbor , ox , oy ) ; }
|
public class HllSketchBuildAggregator {
    /**
     * This method is synchronized because it can be used during indexing,
     * and Druid can call aggregate() and get() concurrently.
     * See https://github.com/druid-io/druid/pull/3956
     */
    @Override
    public void aggregate() {
        final Object value = selector.getObject();
        if (value == null) {
            // Null rows contribute nothing to the sketch.
            return;
        }
        // Serialize sketch updates against concurrent readers (see comment above).
        synchronized (this) {
            updateSketch(sketch, value);
        }
    }
}
|
public class ExperimentHelper { /** * This function will clone the original management events based on given
* new planting dates . The generated event will keep the original days after
* planting date ( DAP ) .
* For example : New event date = New planting date + original DAP .
* @ param data The HashMap of experiment ( including weather data )
* @ param pdates The given planting dates for generation
* @ return Several groups of { @ code ArrayList } of { @ code Event } for each
* planting date in the experiment duration . */
public static ArrayList < ArrayList < HashMap < String , String > > > getAutoEventDate ( Map data , String [ ] pdates ) { } }
|
ArrayList < ArrayList < HashMap < String , String > > > results = new ArrayList < ArrayList < HashMap < String , String > > > ( ) ; // If no more planting event is required
if ( pdates . length < 1 ) { LOG . error ( "There is no PDATE can be used for event generation." ) ; return results ; } // Get Event date
ArrayList < HashMap < String , String > > events = MapUtil . getBucket ( data , "management" ) . getDataList ( ) ; while ( results . size ( ) < pdates . length ) { results . add ( new ArrayList ( ) ) ; } String orgPdate = getValueOr ( data , "origin_pdate" , "-99" ) ; if ( orgPdate . equals ( "" ) ) { LOG . error ( "The original PDATE is missing, can't calculate other event date" ) ; } else if ( orgPdate . equals ( "-99" ) ) { orgPdate = getFstPdate ( data , "" ) ; } if ( orgPdate . equals ( "" ) ) { LOG . warn ( "The original PDATE is missing, use first given PDATE {} as original one" , pdates [ 0 ] ) ; orgPdate = pdates [ 0 ] ; } else { LOG . debug ( "Find original PDATE {}" , orgPdate ) ; } for ( HashMap < String , String > event : events ) { // Get days after planting for current event date
String date = getValueOr ( event , "date" , "" ) ; String orgDap = "" ; String eventType = getValueOr ( event , "event" , "unknown" ) ; // if it is a planting event
if ( eventType . equals ( "planting" ) ) { orgDap = "0" ; } else { if ( date . equals ( "" ) ) { LOG . debug ( "Original {} event has an invalid date: [{}]." , eventType , date ) ; } else { orgDap = calcDAP ( date , orgPdate ) ; LOG . debug ( "Original {} event date: [{}]." , eventType , date ) ; LOG . debug ( "Original {} event's DAP: [{}]" , eventType , orgDap ) ; } } // Special handling for edate
String edate = getValueOr ( event , "edate" , "" ) ; String orgEDap = "" ; if ( ! edate . equals ( "" ) ) { orgEDap = calcDAP ( edate , orgPdate ) ; LOG . debug ( "Original EDATE's DAP: [{}]." , orgDap ) ; } for ( int j = 0 ; j < pdates . length ; j ++ ) { HashMap < String , String > newEvent = new HashMap ( ) ; newEvent . putAll ( event ) ; if ( ! date . equals ( "" ) ) { newEvent . put ( "date" , dateOffset ( pdates [ j ] , orgDap ) ) ; } if ( ! edate . equals ( "" ) ) { newEvent . put ( "edate" , dateOffset ( pdates [ j ] , orgEDap ) ) ; } results . get ( j ) . add ( newEvent ) ; } } return results ;
|
public class LogicTransformer {
    /**
     * Sets up the parent->child transformations map.
     */
    private void initialize() {
        // Register the GroupElement types whose OR-children get transformed.
        addTransformationPair(GroupElement.NOT, new NotOrTransformation());
        addTransformationPair(GroupElement.EXISTS, new ExistOrTransformation());
        addTransformationPair(GroupElement.AND, new AndOrTransformation());
    }
}
|
public class CaptureSearchResult { /** * Add a flag to { @ code robotflags } field .
* If { @ code flag } is already set , this is a no - op .
* @ param flag a flag to add ( don ' t put multiple flags ) . */
public void setRobotFlag ( String flag ) { } }
|
String flags = getRobotFlags ( ) ; if ( flags == null ) { flags = "" ; } if ( ! flags . contains ( flag ) ) { flags = flags + flag ; } setRobotFlags ( flags ) ;
|
public class MigrateDefinition {
    /**
     * {@inheritDoc}
     * Migrates the file specified in the config to the configured path. If the destination path is a
     * directory, the file is migrated inside that directory.
     */
    @Override
    public SerializableVoid runTask(MigrateConfig config, ArrayList<MigrateCommand> commands, RunTaskContext context)
            throws Exception {
        // Fall back to the cluster-wide default write type when none is configured.
        WriteType writeType = config.getWriteType() == null
                ? ServerConfiguration.getEnum(PropertyKey.USER_FILE_WRITE_TYPE_DEFAULT, WriteType.class)
                : WriteType.valueOf(config.getWriteType());
        for (MigrateCommand command : commands) {
            migrate(command, writeType.toProto(), config.isDeleteSource(), context.getFileSystem());
        }
        // Try to delete the source directory if it is empty.
        if (config.isDeleteSource() && !hasFiles(new AlluxioURI(config.getSource()), context.getFileSystem())) {
            try {
                LOG.debug("Deleting {}", config.getSource());
                context.getFileSystem().delete(new AlluxioURI(config.getSource()),
                        DeletePOptions.newBuilder().setRecursive(true).build());
            } catch (FileDoesNotExistException e) {
                // It's already deleted, possibly by another worker.
            }
        }
        return null;
    }
}
|
public class SqlTimestamp {
    /**
     * Gets the value of the named property.
     * Maps each supported method name to its bound function object; unknown
     * names are delegated to the superclass.
     */
    @Override
    public Object getMember(String name) {
        switch (name) {
            case "after": return F_after;
            case "before": return F_before;
            case "compareTo": return F_compareTo;
            case "equals": return F_equals;
            case "getNanos": return F_getNanos;
            case "getTime": return F_getTime;
            case "hashCode": return F_hashCode;
            case "setNanos": return F_setNanos;
            case "setTime": return F_setTime;
        }
        return super.getMember(name);
    }
}
|
public class DPathUtils { /** * Extract a date value from the target object using DPath expression . If the extracted value is
* a string , parse it as a { @ link Date } using the specified date - time format .
* @ param target
* @ param dPath
* @ param dateTimeFormat
* see { @ link SimpleDateFormat }
* @ return
* @ since 0.6.2 */
public static Date getDate ( Object target , String dPath , String dateTimeFormat ) { } }
|
Object obj = getValue ( target , dPath ) ; return ValueUtils . convertDate ( obj , dateTimeFormat ) ;
|
public class HexDecoder { /** * Decodes two nibbles in given input array and returns the decoded octet .
* @ param input the input array of nibbles .
* @ param inoff the offset in the array .
* @ return the decoded octet . */
public static int decodeSingle ( final byte [ ] input , final int inoff ) { } }
|
if ( input == null ) { throw new NullPointerException ( "input" ) ; } if ( input . length < 2 ) { // not required by ( inoff > = input . length - 1 ) checked below
throw new IllegalArgumentException ( "input.length(" + input . length + ") < 2" ) ; } if ( inoff < 0 ) { throw new IllegalArgumentException ( "inoff(" + inoff + ") < 0" ) ; } if ( inoff >= input . length - 1 ) { throw new IllegalArgumentException ( "inoff(" + inoff + ") >= input.length(" + input . length + ") - 1" ) ; } return ( decodeHalf ( input [ inoff ] & 0xFF ) << 4 ) | decodeHalf ( input [ inoff + 1 ] & 0xFF ) ;
|
public class MarkerRulerAction { /** * Checks a Position in a document to see whether the line of last mouse
* activity falls within this region .
* @ param position
* Position of the marker
* @ param document
* the Document the marker resides in
* @ return true if the last mouse click falls on the same line as the marker */
protected boolean includesRulerLine ( Position position , IDocument document ) { } }
|
if ( position != null && ruler != null ) { try { int markerLine = document . getLineOfOffset ( position . getOffset ( ) ) ; int line = ruler . getLineOfLastMouseButtonActivity ( ) ; if ( line == markerLine ) { return true ; } } catch ( BadLocationException x ) { FindbugsPlugin . getDefault ( ) . logException ( x , "Error getting marker line" ) ; } } return false ;
|
public class EhcacheBase {
    /**
     * {@inheritDoc}
     */
    @Override
    public void putAll(Map<? extends K, ? extends V> entries) throws BulkCacheWritingException {
        putAllObserver.begin();
        try {
            statusTransitioner.checkAvailable();
            checkNonNull(entries);
            if (entries.isEmpty()) {
                // Nothing to write; record success and bail out early.
                putAllObserver.end(PutAllOutcome.SUCCESS);
                return;
            }
            try {
                doPutAll(entries);
                putAllObserver.end(PutAllOutcome.SUCCESS);
            } catch (StoreAccessException e) {
                // Store failure: let the resilience strategy decide instead of propagating.
                resilienceStrategy.putAllFailure(entries, e);
                putAllObserver.end(PutAllOutcome.FAILURE);
            }
        } catch (Exception e) {
            // Validation or unexpected failure: record the outcome and rethrow.
            putAllObserver.end(PutAllOutcome.FAILURE);
            throw e;
        }
    }
}
|
public class CmsSecurityManager {
    /**
     * Gets all access control entries.<p>
     *
     * @param context the current request context
     * @return the list of all access control entries
     * @throws CmsException if something goes wrong
     */
    public List<CmsAccessControlEntry> getAllAccessControlEntries(CmsRequestContext context) throws CmsException {
        CmsDbContext dbc = m_dbContextFactory.getDbContext(context);
        List<CmsAccessControlEntry> result = null;
        try {
            result = m_driverManager.getAllAccessControlEntries(dbc);
        } catch (Exception e) {
            // NOTE(review): presumably report() wraps and rethrows as CmsException;
            // otherwise this method could return null -- confirm report()'s contract.
            dbc.report(null, Messages.get().container(Messages.ERR_GET_ACL_ENTRIES_1, "<all resources>"), e);
        } finally {
            dbc.clear();
        }
        return result;
    }
}
|
public class CommsServerByteBufferPool {
    /**
     * Gets a CommsServerByteBuffer from the pool. Any buffer returned
     * will be initially null (holds no data yet).
     * (Original javadoc said "CommsString", which does not match the return type.)
     *
     * @return CommsServerByteBuffer
     */
    @Override
    public synchronized CommsServerByteBuffer allocate() {
        if (tc.isEntryEnabled()) SibTr.entry(this, tc, "allocate");
        // Cast is safe assuming this pool only ever creates CommsServerByteBuffers -- TODO confirm.
        CommsServerByteBuffer buff = (CommsServerByteBuffer) super.allocate();
        if (tc.isEntryEnabled()) SibTr.exit(this, tc, "allocate", buff);
        return buff;
    }
}
|
public class AbstractPojoPathNavigator {
    /**
     * This method gets the {@link PojoPathState} for the given {@code context}.
     *
     * @param initialPojoType is the initial pojo-type this {@link PojoPathNavigator} was invoked with.
     * @param pojoPath is the {@link net.sf.mmm.util.pojo.path.api.PojoPath} to navigate.
     * @param mode is the {@link PojoPathMode mode} that determines how to deal with <em>unsafe</em>
     *        {@link net.sf.mmm.util.pojo.path.api.PojoPath}s.
     * @param context is the {@link PojoPathContext context} for this operation.
     * @return the {@link PojoPathState} or {@code null} if caching is disabled.
     */
    @SuppressWarnings("rawtypes")
    protected PojoPathState createStateByType(GenericType initialPojoType, String pojoPath, PojoPathMode mode,
            PojoPathContext context) {
        Class<?> initialPojoClass = initialPojoType.getRetrievalClass();
        Map<Object, Object> rawCache = context.getCache();
        if (rawCache == null) {
            // Caching disabled: build a fresh, uncached state around a new root path.
            CachingPojoPath rootPath = new CachingPojoPath(null, initialPojoClass, initialPojoType);
            return new PojoPathState(rootPath, mode, pojoPath);
        }
        // Caching enabled: look up (or lazily create) the per-type master cache.
        PojoPathCache masterCache = (PojoPathCache) rawCache.get(initialPojoType);
        if (masterCache == null) {
            masterCache = new PojoPathCache(initialPojoClass, initialPojoType);
            rawCache.put(initialPojoType, masterCache);
        }
        return masterCache.createState(mode, pojoPath);
    }
}
|
public class DescribeCacheSecurityGroupsResult { /** * A list of cache security groups . Each element in the list contains detailed information about one group .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setCacheSecurityGroups ( java . util . Collection ) } or { @ link # withCacheSecurityGroups ( java . util . Collection ) }
* if you want to override the existing values .
* @ param cacheSecurityGroups
* A list of cache security groups . Each element in the list contains detailed information about one group .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DescribeCacheSecurityGroupsResult withCacheSecurityGroups ( CacheSecurityGroup ... cacheSecurityGroups ) { } }
|
if ( this . cacheSecurityGroups == null ) { setCacheSecurityGroups ( new com . amazonaws . internal . SdkInternalList < CacheSecurityGroup > ( cacheSecurityGroups . length ) ) ; } for ( CacheSecurityGroup ele : cacheSecurityGroups ) { this . cacheSecurityGroups . add ( ele ) ; } return this ;
|
public class CalendarForwardHandler { public Calendar adjustDate ( final Calendar startDate , final int increment , final NonWorkingDayChecker < Calendar > checker ) { } }
|
final Calendar cal = ( Calendar ) startDate . clone ( ) ; while ( checker . isNonWorkingDay ( cal ) ) { cal . add ( Calendar . DAY_OF_MONTH , increment ) ; } return cal ;
|
public class GremlinExpressionFactory { /** * Checks if the given expression is an alias expression , and if so
* returns the alias from the expression . Otherwise , null is
* returned . */
public String getAliasNameIfRelevant ( GroovyExpression expr ) { } }
|
if ( ! ( expr instanceof FunctionCallExpression ) ) { return null ; } FunctionCallExpression fc = ( FunctionCallExpression ) expr ; if ( ! fc . getFunctionName ( ) . equals ( AS_METHOD ) ) { return null ; } LiteralExpression aliasName = ( LiteralExpression ) fc . getArguments ( ) . get ( 0 ) ; return aliasName . getValue ( ) . toString ( ) ;
|
public class DomainMarshaller {
    /**
     * Marshall the given parameter object, writing each Domain field through the
     * protocol marshaller under its binding descriptor.
     *
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(Domain domain, ProtocolMarshaller protocolMarshaller) {
        if (domain == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(domain.getName(), NAME_BINDING);
            protocolMarshaller.marshall(domain.getArn(), ARN_BINDING);
            protocolMarshaller.marshall(domain.getSupportCode(), SUPPORTCODE_BINDING);
            protocolMarshaller.marshall(domain.getCreatedAt(), CREATEDAT_BINDING);
            protocolMarshaller.marshall(domain.getLocation(), LOCATION_BINDING);
            protocolMarshaller.marshall(domain.getResourceType(), RESOURCETYPE_BINDING);
            protocolMarshaller.marshall(domain.getTags(), TAGS_BINDING);
            protocolMarshaller.marshall(domain.getDomainEntries(), DOMAINENTRIES_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
|
public class NeuralNetworkParser { /** * 获取依存
* @ param deprels 依存列表
* @ param id 依存下标
* @ return 依存 */
int DEPREL ( final List < Integer > deprels , int id ) { } }
|
return ( ( id != - 1 ) ? ( deprels . get ( id ) + kDeprelInFeaturespace ) : kNilDeprel ) ;
|
public class DirectedMultigraph { /** * { @ inheritDoc } */
public int outDegree ( int vertex ) { } }
|
SparseDirectedTypedEdgeSet < T > edges = vertexToEdges . get ( vertex ) ; return ( edges == null ) ? 0 : edges . successors ( ) . size ( ) ;
|
public class EndPointInfoImpl { /** * { @ inheritDoc } */
@ Override protected final String getDescription ( MBeanAttributeInfo info ) { } }
|
String description = "Unknown attribute" ; if ( info != null ) { String operationName = info . getName ( ) ; if ( operationName != null ) { if ( operationName . equals ( "Name" ) ) { description = "Return the name of the endpoint." ; } else if ( operationName . equals ( "Host" ) ) { description = "Return the listening host name of the endpoint. A value of '*' means it is listening on all available host names." ; } else if ( operationName . equals ( "Port" ) ) { description = "Return the listening port of the endpoint." ; } } } return description ;
|
public class AipContentCensor {
    /**
     * Anti-porn image recognition endpoint.
     *
     * @param imgPath path to a local image file
     * @return JSONObject with the recognition result, or an IMAGE_READ_ERROR
     *         result if the file cannot be read
     */
    public JSONObject antiPorn(String imgPath) {
        try {
            byte[] imgData = Util.readFileByBytes(imgPath);
            return antiPorn(imgData);
        } catch (IOException e) {
            // Reading the local file failed; return a structured error instead of throwing.
            return AipError.IMAGE_READ_ERROR.toJsonResult();
        }
    }
}
|
public class IconicsDrawable { /** * Set contour width from an dimen res for the icon
* @ return The current IconicsDrawable for chaining . */
@ NonNull public IconicsDrawable contourWidthRes ( @ DimenRes int sizeResId ) { } }
|
return contourWidthPx ( mContext . getResources ( ) . getDimensionPixelSize ( sizeResId ) ) ;
|
public class UpdateChargingStationReservableJsonCommandHandler {
    /**
     * {@inheritDoc}
     */
    @Override
    public void handle(String chargingStationId, JsonObject commandObject, IdentityContext identityContext)
            throws UserIdentityUnauthorizedException {
        try {
            ChargingStation chargingStation = repository.findOne(chargingStationId);
            // Only act on stations that exist and have been accepted; otherwise silently ignore.
            if (chargingStation != null && chargingStation.isAccepted()) {
                UpdateChargingStationReservableApiCommand command =
                        gson.fromJson(commandObject, UpdateChargingStationReservableApiCommand.class);
                ChargingStationId csId = new ChargingStationId(chargingStationId);
                UserIdentity userIdentity = identityContext.getUserIdentity();
                // Authorization is checked against the concrete command type before dispatch.
                if (command.isReservable()) {
                    checkAuthorization(csId, userIdentity, MakeChargingStationReservableCommand.class);
                    commandGateway.send(new MakeChargingStationReservableCommand(csId, identityContext));
                } else {
                    checkAuthorization(csId, userIdentity, MakeChargingStationNotReservableCommand.class);
                    commandGateway.send(new MakeChargingStationNotReservableCommand(csId, identityContext));
                }
            }
        } catch (JsonSyntaxException e) {
            throw new IllegalArgumentException(
                    "Update charging station reservable command not able to parse the payload, is your JSON correctly formatted?", e);
        }
    }
}
|
public class ObjectMapperProvider { /** * checks if the member is to be filtered out or no if filter itself is
* null , writes out that member
* @ param member
* @ param includeFilter
* @ param taskObject
* @ param jsonGenerator
* @ throws JsonGenerationException
* @ throws IOException */
public static void filteredWrite ( String member , Predicate < String > includeFilter , Object taskObject , JsonGenerator jsonGenerator ) throws JsonGenerationException , IOException { } }
|
if ( includeFilter != null ) { if ( includeFilter . apply ( member ) ) { jsonGenerator . writeFieldName ( member ) ; jsonGenerator . writeObject ( taskObject ) ; } } else { jsonGenerator . writeFieldName ( member ) ; jsonGenerator . writeObject ( taskObject ) ; }
|
public class DebianParser { /** * / * - - - Private methods - - - */
private DependencyInfo createDependencyInfo ( Package packageInfo ) { } }
|
DependencyInfo dependencyInfo = null ; if ( StringUtils . isNotBlank ( packageInfo . getPackageName ( ) ) && StringUtils . isNotBlank ( packageInfo . getVersion ( ) ) && StringUtils . isNotBlank ( packageInfo . getArchitecture ( ) ) ) { if ( packageInfo . getVersion ( ) . contains ( Constants . PLUS ) ) { dependencyInfo = new DependencyInfo ( null , MessageFormat . format ( DEBIAN_PACKAGE_PATTERN , packageInfo . getPackageName ( ) , packageInfo . getVersion ( ) . substring ( 0 , packageInfo . getVersion ( ) . lastIndexOf ( Constants . PLUS ) ) , packageInfo . getArchitecture ( ) ) , packageInfo . getVersion ( ) ) ; } else { dependencyInfo = new DependencyInfo ( null , MessageFormat . format ( DEBIAN_PACKAGE_PATTERN , packageInfo . getPackageName ( ) , packageInfo . getVersion ( ) , packageInfo . getArchitecture ( ) ) , packageInfo . getVersion ( ) ) ; } } if ( dependencyInfo != null ) { return dependencyInfo ; } else { return null ; }
|
public class CmsPriorityDateResourceComparator { /** * Initializes the comparator key based on the member variables . < p >
* @ param resource the resource to use
* @ param cms the current OpenCms user contxt */
private void init ( CmsResource resource , CmsObject cms ) { } }
|
List < CmsProperty > properties ; try { properties = cms . readPropertyObjects ( resource , false ) ; } catch ( CmsException e ) { m_priority = 0 ; m_date = 0 ; return ; } try { m_priority = Integer . parseInt ( CmsProperty . get ( CmsPriorityResourceCollector . PROPERTY_PRIORITY , properties ) . getValue ( ) ) ; } catch ( NumberFormatException e ) { m_priority = CmsPriorityResourceCollector . PRIORITY_STANDARD ; } try { m_date = Long . parseLong ( CmsProperty . get ( PROPERTY_DATE , properties ) . getValue ( ) ) ; } catch ( NumberFormatException e ) { m_date = 0 ; }
|
public class ListAttributeDefinition { /** * Iterates through the elements in the { @ code parameter } list , calling { @ link # convertParameterElementExpressions ( ModelNode ) }
* for each .
* < strong > Note < / strong > that the default implementation of { @ link # convertParameterElementExpressions ( ModelNode ) }
* will only convert simple { @ link ModelType # STRING } elements . If users need to handle complex elements
* with embedded expressions , they should use a subclass that overrides that method .
* { @ inheritDoc } */
@ Override protected ModelNode convertParameterExpressions ( ModelNode parameter ) { } }
|
ModelNode result = parameter ; List < ModelNode > asList ; try { asList = parameter . asList ( ) ; } catch ( IllegalArgumentException iae ) { // We can ' t convert ; we ' ll just return parameter
asList = null ; } if ( asList != null ) { boolean changeMade = false ; ModelNode newList = new ModelNode ( ) . setEmptyList ( ) ; for ( ModelNode item : asList ) { ModelNode converted = convertParameterElementExpressions ( item ) ; newList . add ( converted ) ; changeMade |= ! converted . equals ( item ) ; } if ( changeMade ) { result = newList ; } } return result ;
|
public class CallTreeCallback { /** * Configures { @ link # callTreeLogTemplate } with a { @ link SplitThresholdLogTemplate } . */
private void initLogThreshold ( Long threshold ) { } }
|
this . logThreshold = threshold ; final LogTemplate < Split > toLogger = toSLF4J ( getClass ( ) . getName ( ) , "debug" ) ; if ( threshold == null ) { callTreeLogTemplate = toLogger ; } else { callTreeLogTemplate = whenSplitLongerThanMilliseconds ( toLogger , threshold ) ; }
|
public class BoundedBuffer { /** * Inserts an object into the expeditedBuffer . Note that
* since there is no synchronization , it is assumed
* that this is done outside the scope of this call . */
private final void expeditedInsert ( T t ) { } }
|
expeditedBuffer [ expeditedPutIndex ] = t ; if ( ++ expeditedPutIndex >= expeditedBuffer . length ) { expeditedPutIndex = 0 ; }
|
public class DrawerBuilder { /** * Define a custom Adapter which will be used in the drawer
* NOTE : this is not recommender
* WARNING : if you do this after adding items you will loose those !
* @ param adapter the FastAdapter to use with this drawer
* @ return this */
public DrawerBuilder withAdapter ( @ NonNull FastAdapter < IDrawerItem > adapter ) { } }
|
this . mAdapter = adapter ; // we have to rewrap as a different FastAdapter was provided
adapter . addAdapter ( 0 , mHeaderAdapter ) ; adapter . addAdapter ( 1 , mItemAdapter ) ; adapter . addAdapter ( 2 , mFooterAdapter ) ; adapter . addExtension ( mExpandableExtension ) ; return this ;
|
public class Gram { /** * Submits a GramJob to specified gatekeeper as
* a interactive or batch job . Performs limited delegation .
* @ throws GramException if an error occurs during submisson
* @ param resourceManagerContact resource manager contact
* @ param job gram job
* @ param batchJob true if batch job , interactive otherwise */
public static void request ( String resourceManagerContact , GramJob job , boolean batchJob ) throws GramException , GSSException { } }
|
request ( resourceManagerContact , job , batchJob , true ) ;
|
public class UpdateGroupRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( UpdateGroupRequest updateGroupRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( updateGroupRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateGroupRequest . getGroupName ( ) , GROUPNAME_BINDING ) ; protocolMarshaller . marshall ( updateGroupRequest . getGroupARN ( ) , GROUPARN_BINDING ) ; protocolMarshaller . marshall ( updateGroupRequest . getFilterExpression ( ) , FILTEREXPRESSION_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class Transfer { /** * Method ProcessNextStep */
private void ProcessNextStep ( ) { } }
|
switch ( iSelectionStep ) { case SELECT_SOURCE_CATALOG : case SELECT_DEST_CATALOG : if ( CatalogToSelect ( ) ) { fMain . show ( ) ; return ; } break ; case SELECT_DEST_SCHEMA : case SELECT_SOURCE_SCHEMA : if ( SchemaToSelect ( ) ) { fMain . show ( ) ; return ; } break ; case SELECT_SOURCE_TABLES : if ( iTransferMode == TRFM_TRANSFER ) { bStart . setLabel ( "Start Transfer" ) ; } else if ( iTransferMode == TRFM_DUMP ) { bStart . setLabel ( "Start Dump" ) ; } else if ( iTransferMode == TRFM_RESTORE ) { bStart . setLabel ( "Start Restore" ) ; } bStart . invalidate ( ) ; bStart . setEnabled ( false ) ; lTable . setMultipleMode ( false ) ; RefreshMainDisplay ( ) ; break ; default : break ; }
|
public class CompareFixture { /** * Determines number of differences ( substrings that are not equal ) between two strings ,
* ignoring differences in whitespace .
* @ param first first string to compare .
* @ param second second string to compare .
* @ return number of different substrings . */
public int countDifferencesBetweenIgnoreWhitespaceAnd ( String first , String second ) { } }
|
String cleanFirst = allWhitespaceToSingleSpace ( first ) ; String cleanSecond = allWhitespaceToSingleSpace ( second ) ; return countDifferencesBetweenAnd ( cleanFirst , cleanSecond ) ;
|
public class GuavaUtils { /** * Like Guava ' s Enums . getIfPresent , with some differences .
* < ul >
* < li > Returns nullable rather than Optional < / li >
* < li > Does not require Guava 12 < / li >
* < / ul > */
@ Nullable public static < T extends Enum < T > > T getEnumIfPresent ( final Class < T > enumClass , final String value ) { } }
|
Preconditions . checkNotNull ( enumClass , "enumClass" ) ; Preconditions . checkNotNull ( value , "value" ) ; for ( T enumValue : enumClass . getEnumConstants ( ) ) { if ( enumValue . name ( ) . equals ( value ) ) { return enumValue ; } } return null ;
|
public class FileUploadUtils { /** * Get the filename of an uploaded file
* @ return the filename or null if not present */
public static String getOriginalFileName ( Part part ) { } }
|
String contentDisposition = part . getHeader ( "content-disposition" ) ; if ( contentDisposition != null ) { for ( String cd : contentDisposition . split ( ";" ) ) { if ( cd . trim ( ) . startsWith ( "filename" ) ) { String path = cd . substring ( cd . indexOf ( '=' ) + 1 ) . replaceAll ( "\"" , "" ) . trim ( ) ; Path filename = Paths . get ( path ) . getFileName ( ) ; return StringUtils . hasText ( filename . toString ( ) ) ? filename . toString ( ) : null ; } } } return null ;
|
public class JFapChannelFactory { /** * Creates a new channel . Uses channel configuration to determine if the channel should be
* inbound or outbound .
* @ see BaseChannelFactory # createChannel ( com . ibm . websphere . channelfw . ChannelData ) */
protected Channel createChannel ( ChannelData config ) throws ChannelException { } }
|
if ( tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "createChannel" , config ) ; Channel retChannel ; if ( config . isInbound ( ) ) { if ( tc . isDebugEnabled ( ) ) SibTr . debug ( this , tc , "createChannel" , "inbound" ) ; try { Class clazz = Class . forName ( JFapChannelConstants . INBOUND_CHANNEL_CLASS ) ; Constructor contruct = clazz . getConstructor ( new Class [ ] { ChannelFactoryData . class , ChannelData . class } ) ; retChannel = ( Channel ) contruct . newInstance ( new Object [ ] { channelFactoryData , config } ) ; } catch ( Exception e ) { FFDCFilter . processException ( e , "com.ibm.ws.sib.jfapchannel.impl.JFapChannelFactory.createChannel" , JFapChannelConstants . JFAPCHANNELFACT_CREATECHANNEL_01 , this ) ; if ( tc . isDebugEnabled ( ) ) SibTr . debug ( this , tc , "Unable to instantiate inbound channel" , e ) ; // Rethrow as a channel exception
throw new ChannelException ( e ) ; } } else { retChannel = new JFapChannelOutbound ( channelFactoryData , config ) ; } if ( tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "createChannel" , retChannel ) ; return retChannel ;
|
public class RestClientUtil { /** * 删除索引文档
* @ param path / twitter / _ doc / 1
* / twitter / _ doc / 1 ? routing = kimchy
* / twitter / _ doc / 1 ? timeout = 5m
* @ return */
public String deleteByPath ( String path ) throws ElasticSearchException { } }
|
try { return this . client . executeHttp ( path , ClientUtil . HTTP_DELETE ) ; } catch ( ElasticSearchException e ) { return ResultUtil . hand404HttpRuntimeException ( e , String . class , ResultUtil . OPERTYPE_deleteDocument ) ; }
|
public class ArrayUtil { /** * convert a primitive array ( value type ) to Object Array ( reference type ) .
* @ param primArr value type Array
* @ return reference type Array */
public static Character [ ] toReferenceType ( char [ ] primArr ) { } }
|
Character [ ] refArr = new Character [ primArr . length ] ; for ( int i = 0 ; i < primArr . length ; i ++ ) refArr [ i ] = new Character ( primArr [ i ] ) ; return refArr ;
|
public class SecStrucCalc { /** * Test if two groups are forming an H - Bond . The bond tested is
* from the CO of group i to the NH of group j . Acceptor ( i ) and
* donor ( j ) . The donor of i has to be j , and the acceptor of j
* has to be i .
* DSSP defines H - Bonds if the energy < - 500 cal / mol .
* @ param i group one
* @ param j group two
* @ return flag if the two are forming an Hbond */
private boolean isBonded ( int i , int j ) { } }
|
SecStrucState one = getSecStrucState ( i ) ; SecStrucState two = getSecStrucState ( j ) ; double don1e = one . getDonor1 ( ) . getEnergy ( ) ; double don2e = one . getDonor2 ( ) . getEnergy ( ) ; double acc1e = two . getAccept1 ( ) . getEnergy ( ) ; double acc2e = two . getAccept2 ( ) . getEnergy ( ) ; int don1p = one . getDonor1 ( ) . getPartner ( ) ; int don2p = one . getDonor2 ( ) . getPartner ( ) ; int acc1p = two . getAccept1 ( ) . getPartner ( ) ; int acc2p = two . getAccept2 ( ) . getPartner ( ) ; // Either donor from i is j , or accept from j is i
boolean hbond = ( don1p == j && don1e < HBONDHIGHENERGY ) || ( don2p == j && don2e < HBONDHIGHENERGY ) || ( acc1p == i && acc1e < HBONDHIGHENERGY ) || ( acc2p == i && acc2e < HBONDHIGHENERGY ) ; if ( hbond ) { logger . debug ( "*** H-bond from CO of {} to NH of {}" , i , j ) ; return true ; } return false ;
|
public class GBSIterator { /** * Find the next key in the index after an eof condition .
* < p > The vno from the time of the eof condition is stored in
* _ current1 . If the vno in the index has not changed then the
* iterator is still stuck at the end and there is nothing to do . If
* the vno in the index has changed then we do an optimistic search to
* re - establish position followed by a pessimistic search if the
* optimistic search failed . < / p > */
private void findNextAfterEof ( DeleteStack stack ) { } }
|
if ( ! _eof ) throw new RuntimeException ( "findNextAfterEof called when _eof false." ) ; if ( _current1 . _vno != _index . vno ( ) ) { boolean state = pessimisticNeeded ; state = optimisticSearchNext ( stack ) ; if ( state == pessimisticNeeded ) pessimisticSearchNext ( stack ) ; }
|
public class DescribeRetentionConfigurationsRequest { /** * A list of names of retention configurations for which you want details . If you do not specify a name , AWS Config
* returns details for all the retention configurations for that account .
* < note >
* Currently , AWS Config supports only one retention configuration per region in your account .
* < / note >
* @ return A list of names of retention configurations for which you want details . If you do not specify a name , AWS
* Config returns details for all the retention configurations for that account . < / p > < note >
* Currently , AWS Config supports only one retention configuration per region in your account . */
public java . util . List < String > getRetentionConfigurationNames ( ) { } }
|
if ( retentionConfigurationNames == null ) { retentionConfigurationNames = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return retentionConfigurationNames ;
|
public class TransactionImpl { /** * Indicate that the prepare XA phase failed . */
protected void setPrepareXAFailed ( ) // d266464A
{ } }
|
if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "setPrepareXAFailed" ) ; setRBO ( ) ; // Ensure native context is informed
if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "setPrepareXAFailed" ) ;
|
public class LetterNode { /** * Adds a word and its related value .
* @ param word
* keyword
* @ param value
* related value */
public void add ( final String word , final V value ) { } }
|
char c = word . charAt ( 0 ) ; LetterNode < V > node = get ( c ) ; if ( node == null ) { node = new LetterNode < V > ( this . word + c ) ; } this . nodes . put ( c , node ) ; if ( word . length ( ) == 1 ) { node . isKeyword = true ; node . value = value ; return ; } node . add ( word . substring ( 1 ) , value ) ;
|
public class RestServiceExceptionFacade { /** * Create a response message as a JSON - String from the given parts .
* @ param status is the HTTP { @ link Status } .
* @ param message is the JSON message attribute .
* @ param code is the { @ link NlsRuntimeException # getCode ( ) error code } .
* @ param uuid the { @ link UUID } of the response message .
* @ param errorsMap is a map with all validation errors
* @ return the corresponding { @ link Response } . */
protected Response createResponse ( Status status , String message , String code , UUID uuid , Map < String , List < String > > errorsMap ) { } }
|
String json = createJsonErrorResponseMessage ( message , code , uuid , errorsMap ) ; return Response . status ( status ) . entity ( json ) . build ( ) ;
|
public class AbstractQueryProtocol { /** * Execute a unique clientPrepareQuery .
* @ param mustExecuteOnMaster was intended to be launched on master connection
* @ param results results
* @ param clientPrepareResult clientPrepareResult
* @ param parameters parameters
* @ throws SQLException exception */
public void executeQuery ( boolean mustExecuteOnMaster , Results results , final ClientPrepareResult clientPrepareResult , ParameterHolder [ ] parameters ) throws SQLException { } }
|
cmdPrologue ( ) ; try { if ( clientPrepareResult . getParamCount ( ) == 0 && ! clientPrepareResult . isQueryMultiValuesRewritable ( ) ) { if ( clientPrepareResult . getQueryParts ( ) . size ( ) == 1 ) { ComQuery . sendDirect ( writer , clientPrepareResult . getQueryParts ( ) . get ( 0 ) ) ; } else { ComQuery . sendMultiDirect ( writer , clientPrepareResult . getQueryParts ( ) ) ; } } else { writer . startPacket ( 0 ) ; ComQuery . sendSubCmd ( writer , clientPrepareResult , parameters , - 1 ) ; writer . flush ( ) ; } getResult ( results ) ; } catch ( SQLException queryException ) { throw logQuery . exceptionWithQuery ( parameters , queryException , clientPrepareResult ) ; } catch ( IOException e ) { throw handleIoException ( e ) ; }
|
public class DatabaseAccountsInner { /** * Changes the failover priority for the Azure Cosmos DB database account . A failover priority of 0 indicates a write region . The maximum value for a failover priority = ( total number of regions - 1 ) . Failover priority values must be unique for each of the regions in which the database account exists .
* @ param resourceGroupName Name of an Azure resource group .
* @ param accountName Cosmos DB database account name .
* @ param failoverPolicies List of failover policies .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable for the request */
public Observable < Void > failoverPriorityChangeAsync ( String resourceGroupName , String accountName , List < FailoverPolicy > failoverPolicies ) { } }
|
return failoverPriorityChangeWithServiceResponseAsync ( resourceGroupName , accountName , failoverPolicies ) . map ( new Func1 < ServiceResponse < Void > , Void > ( ) { @ Override public Void call ( ServiceResponse < Void > response ) { return response . body ( ) ; } } ) ;
|
public class FlowTypeCheck { /** * Check the type of a given variable access . This is straightforward since the
* determine is fully determined by the declared type for the variable in
* question .
* @ param expr
* @ return */
private SemanticType checkVariable ( Expr . VariableAccess expr , Environment environment ) { } }
|
Decl . Variable var = expr . getVariableDeclaration ( ) ; return environment . getType ( var ) ;
|
public class AmazonWorkMailClient { /** * Allows the administrator to reset the password for a user .
* @ param resetPasswordRequest
* @ return Result of the ResetPassword operation returned by the service .
* @ throws DirectoryServiceAuthenticationFailedException
* The directory service doesn ' t recognize the credentials supplied by WorkMail .
* @ throws DirectoryUnavailableException
* The directory on which you are trying to perform operations isn ' t available .
* @ throws EntityNotFoundException
* The identifier supplied for the user , group , or resource does not exist in your organization .
* @ throws EntityStateException
* You are performing an operation on a user , group , or resource that isn ' t in the expected state , such as
* trying to delete an active user .
* @ throws InvalidParameterException
* One or more of the input parameters don ' t match the service ' s restrictions .
* @ throws InvalidPasswordException
* The supplied password doesn ' t match the minimum security constraints , such as length or use of special
* characters .
* @ throws OrganizationNotFoundException
* An operation received a valid organization identifier that either doesn ' t belong or exist in the system .
* @ throws OrganizationStateException
* The organization must have a valid state ( Active or Synchronizing ) to perform certain operations on the
* organization or its members .
* @ throws UnsupportedOperationException
* You can ' t perform a write operation against a read - only directory .
* @ sample AmazonWorkMail . ResetPassword
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / workmail - 2017-10-01 / ResetPassword " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public ResetPasswordResult resetPassword ( ResetPasswordRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeResetPassword ( request ) ;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.