signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class RespondActivityTaskCompletedRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( RespondActivityTaskCompletedRequest respondActivityTaskCompletedRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( respondActivityTaskCompletedRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( respondActivityTaskCompletedRequest . getTaskToken ( ) , TASKTOKEN_BINDING ) ; protocolMarshaller . marshall ( respondActivityTaskCompletedRequest . getResult ( ) , RESULT_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class RestfulServer { /** * Register a group of providers . These could be Resource Providers ( classes implementing { @ link IResourceProvider } ) or " plain " providers , or a mixture of the two .
* @ param theProviders a { @ code Collection } of theProviders . The parameter could be null or an empty { @ code Collection } */
public void registerProviders ( Object ... theProviders ) { } }
|
Validate . noNullElements ( theProviders ) ; registerProviders ( Arrays . asList ( theProviders ) ) ;
|
public class PolicyAssignmentsInner { /** * Creates a policy assignment .
* Policy assignments are inherited by child resources . For example , when you apply a policy to a resource group that policy is assigned to all resources in the group .
* @ param scope The scope of the policy assignment .
* @ param policyAssignmentName The name of the policy assignment .
* @ param parameters Parameters for the policy assignment .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PolicyAssignmentInner object */
public Observable < PolicyAssignmentInner > createAsync ( String scope , String policyAssignmentName , PolicyAssignmentInner parameters ) { } }
|
return createWithServiceResponseAsync ( scope , policyAssignmentName , parameters ) . map ( new Func1 < ServiceResponse < PolicyAssignmentInner > , PolicyAssignmentInner > ( ) { @ Override public PolicyAssignmentInner call ( ServiceResponse < PolicyAssignmentInner > response ) { return response . body ( ) ; } } ) ;
|
public class CodeBuilderFactory { /** * Create a synthetic resource .
* @ param resourceSet the resourceSet .
* @ return the resource . */
@ Pure protected Resource createResource ( ResourceSet resourceSet ) { } }
|
URI uri = computeUnusedUri ( resourceSet ) ; Resource resource = getResourceFactory ( ) . createResource ( uri ) ; resourceSet . getResources ( ) . add ( resource ) ; return resource ;
|
public class CacheHandler { /** * MSI end */
public void addRules ( RuleHandler ruleHandler ) { } }
|
ruleHandler . addRule ( "display-name" , new DisplayNameHandler ( ) ) ; ruleHandler . addRule ( "description" , new DescriptionHandler ( ) ) ; ruleHandler . addRule ( "cache-instance" , new CacheInstanceHandler ( ) ) ; // MSI
ruleHandler . addRule ( "cache-entry" , new CacheEntryHandler ( this ) ) ; ruleHandler . addRule ( "group" , new GroupHandler ( ) ) ; ruleHandler . addRule ( "skip-cache-attribute" , new SkipCacheAttributeHandler ( this ) ) ;
|
public class HeartAbleConnectionListener { /** * 重置心跳发送序号
* @ param conn */
protected final void resetHeartSeq ( JConnection conn ) { } }
|
if ( conn . hasAttribute ( IdleConnectionKey . HeartConfig ) ) { // 重置心跳序号
HeartConfig hc = ( HeartConfig ) conn . getAttribute ( IdleConnectionKey . HeartConfig ) ; hc . setSeq ( 0 ) ; }
|
public class RythmConfiguration { /** * Return { @ link RythmConfigurationKey # CODEGEN _ BYTE _ CODE _ ENHANCER } without lookup
* @ return the byte code enhancer implementation */
public IByteCodeEnhancer byteCodeEnhancer ( ) { } }
|
if ( IByteCodeEnhancer . INSTS . NULL == _byteCodeEnhancer ) { _byteCodeEnhancer = get ( CODEGEN_BYTE_CODE_ENHANCER ) ; } return _byteCodeEnhancer ;
|
public class AbstractIoBuffer { /** * { @ inheritDoc } */
@ Override public final IoBuffer putLong ( int index , long value ) { } }
|
autoExpand ( index , 8 ) ; buf ( ) . putLong ( index , value ) ; return this ;
|
public class BeanDefinitionParser { /** * parsePropertySubElement .
* @ param ele a { @ link org . w3c . dom . Element } object .
* @ param bd a { @ link org . springframework . beans . factory . config . BeanDefinition } object .
* @ return a { @ link java . lang . Object } object . */
public Object parsePropertySubElement ( Element ele , BeanDefinition bd ) { } }
|
return parsePropertySubElement ( ele , bd , null ) ;
|
public class DocClient { /** * delete a Document .
* @ param documentId The document id .
* @ return A DeleteDocumentResponse object containing the information returned by Document . */
public DeleteDocumentResponse deleteDocument ( String documentId ) { } }
|
DeleteDocumentRequest request = new DeleteDocumentRequest ( ) ; request . setDocumentId ( documentId ) ; return this . deleteDocument ( request ) ;
|
public class ListManagementTermListsImpl { /** * Updates an Term List .
* @ param listId List Id of the image list .
* @ param contentType The content type .
* @ param bodyParameter Schema of the body .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < TermList > updateAsync ( String listId , String contentType , BodyModel bodyParameter , final ServiceCallback < TermList > serviceCallback ) { } }
|
return ServiceFuture . fromResponse ( updateWithServiceResponseAsync ( listId , contentType , bodyParameter ) , serviceCallback ) ;
|
public class RetryHandler { /** * Run the specified method , retrying on failure .
* @ param runner JUnit test runner
* @ param method test method to be run
* @ param notifier run notifier through which events are published
* @ param maxRetry maximum number of retry attempts */
static void runChildWithRetry ( Object runner , final FrameworkMethod method , RunNotifier notifier , int maxRetry ) { } }
|
boolean doRetry = false ; Statement statement = invoke ( runner , "methodBlock" , method ) ; Description description = invoke ( runner , "describeChild" , method ) ; AtomicInteger count = new AtomicInteger ( maxRetry ) ; do { EachTestNotifier eachNotifier = new EachTestNotifier ( notifier , description ) ; eachNotifier . fireTestStarted ( ) ; try { statement . evaluate ( ) ; doRetry = false ; } catch ( AssumptionViolatedException thrown ) { doRetry = doRetry ( method , thrown , count ) ; if ( doRetry ) { description = RetriedTest . proxyFor ( description , thrown ) ; eachNotifier . fireTestIgnored ( ) ; } else { eachNotifier . addFailedAssumption ( thrown ) ; } } catch ( Throwable thrown ) { doRetry = doRetry ( method , thrown , count ) ; if ( doRetry ) { description = RetriedTest . proxyFor ( description , thrown ) ; eachNotifier . fireTestIgnored ( ) ; } else { eachNotifier . addFailure ( thrown ) ; } } finally { eachNotifier . fireTestFinished ( ) ; } } while ( doRetry ) ;
|
public class JpaControllerManagement { /** * ActionStatus updates are allowed mainly if the action is active . If the
* action is not active we accept further status updates if permitted so
* by repository configuration . In this case , only the values : Status . ERROR
* and Status . FINISHED are allowed . In the case of a DOWNLOAD _ ONLY action ,
* we accept status updates only once . */
private boolean isUpdatingActionStatusAllowed ( final JpaAction action , final JpaActionStatus actionStatus ) { } }
|
final boolean isIntermediateFeedback = ! FINISHED . equals ( actionStatus . getStatus ( ) ) && ! Status . ERROR . equals ( actionStatus . getStatus ( ) ) ; final boolean isAllowedByRepositoryConfiguration = ! repositoryProperties . isRejectActionStatusForClosedAction ( ) && isIntermediateFeedback ; final boolean isAllowedForDownloadOnlyActions = isDownloadOnly ( action ) && ! isIntermediateFeedback ; return action . isActive ( ) || isAllowedByRepositoryConfiguration || isAllowedForDownloadOnlyActions ;
|
public class StringUtils { /** * 自动填充
* 比如 : 待填充字符串 : Dd , 用x在其左侧填充成10位的字符串 ,
* 则为 : xxxxxDd
* 如果待字符串is null , 则返回null 。
* 如果填充长度is null , 或者小于原长度 , 则返回原待填充字符串 。
* 例如 : 待填充字符串 : Dd , 用x在其左侧填充成10位的字符串 , 则仍然为 : Dd
* 如果你有指定填充字符串 , 或者填充方向 , 我们会进行默认 。
* 填充字符串默认为 : 0 , 方向为 : 左 。
* 例如 : 待填充字符串 : Dd , 填充成10位的字符串 , 则为 : 00000Dd
* @ param source 待填充的字符串
* @ param length 填充后的长度
* @ param str 填充的字符 ( 串 )
* @ param isRight 是否在原字符串的右侧填充
* @ return 填充后的字符串 */
public static String autoFill ( String source , Integer length , String str , Boolean isRight ) { } }
|
// 初始化校验
if ( source == null || length == null || ( source + "" ) . length ( ) >= length ) return source + "" ; // 指定填充字符
if ( isEmpty ( str ) ) str = "0" ; // 指定填充方向
if ( isRight == null ) isRight = false ; // 字符填充长度
int count = ( source + "" ) . length ( ) ; StringBuilder sb = new StringBuilder ( length ) ; // 右填充
if ( isRight ) { sb . append ( source ) ; } // 字符填充
int size = length - count ; for ( int i = 0 ; i < size ; i ++ ) { sb . append ( str ) ; } // 左填充
if ( ! isRight ) { sb . append ( source ) ; } return sb . toString ( ) ;
|
public class CreateDlpJobRequest { /** * < code > . google . privacy . dlp . v2 . InspectJobConfig inspect _ job = 2 ; < / code > */
public com . google . privacy . dlp . v2 . InspectJobConfig getInspectJob ( ) { } }
|
if ( jobCase_ == 2 ) { return ( com . google . privacy . dlp . v2 . InspectJobConfig ) job_ ; } return com . google . privacy . dlp . v2 . InspectJobConfig . getDefaultInstance ( ) ;
|
public class BaseAction { /** * Creates a new instance of BaseAction .
* @ param actionKey The menu description key for this item . */
public void init ( String actionKey , ActionListener targetListener ) { } }
|
m_targetListener = targetListener ; m_actionKey = actionKey ; String text = BaseApplet . getSharedInstance ( ) . getString ( actionKey ) ; String desc = BaseApplet . getSharedInstance ( ) . getString ( actionKey + TIP ) ; ImageIcon icon = BaseApplet . getSharedInstance ( ) . loadImageIcon ( actionKey ) ; ActionManager . getActionManager ( ) . put ( actionKey , this ) ; this . putValue ( AbstractAction . NAME , text ) ; if ( desc != null ) if ( ! desc . equalsIgnoreCase ( actionKey + TIP ) ) this . putValue ( AbstractAction . SHORT_DESCRIPTION , desc ) ; if ( icon != null ) this . putValue ( AbstractAction . SMALL_ICON , icon ) ;
|
public class AbstractFolderTreeItemFactory { /** * This implementation is different , because the root object is also put into
* the item store . */
@ Nonnull public final ITEMTYPE createRoot ( ) { } }
|
final ITEMTYPE aItem = internalCreateRoot ( ) ; return addToItemStore ( aItem . getGlobalUniqueDataID ( ) , aItem ) ;
|
public class CommerceNotificationQueueEntryLocalServiceBaseImpl { /** * Creates a new commerce notification queue entry with the primary key . Does not add the commerce notification queue entry to the database .
* @ param commerceNotificationQueueEntryId the primary key for the new commerce notification queue entry
* @ return the new commerce notification queue entry */
@ Override @ Transactional ( enabled = false ) public CommerceNotificationQueueEntry createCommerceNotificationQueueEntry ( long commerceNotificationQueueEntryId ) { } }
|
return commerceNotificationQueueEntryPersistence . create ( commerceNotificationQueueEntryId ) ;
|
public class CredentialListReader { /** * Make the request to the Twilio API to perform the read .
* @ param client TwilioRestClient with which to make the request
* @ return CredentialList ResourceSet */
@ Override public ResourceSet < CredentialList > read ( final TwilioRestClient client ) { } }
|
return new ResourceSet < > ( this , client , firstPage ( client ) ) ;
|
public class PairAbstractionConverter { /** * Adds all of the abstraction definitions for which the given complex
* abstraction instance defines a temporal relation to the given complex
* abstraction definition .
* @ param complexAbstractionInstance
* a Protege complex abstraction < code > Instance < / code > .
* @ param cad
* a PROTEMPA < code > HighLevelAbstractionDefinition < / code >
* instance . */
private static void addComponentAbstractionDefinitions ( Instance pairAbstractionInstance , SequentialTemporalPatternDefinition pd , Map < Instance , TemporalExtendedPropositionDefinition > extendedParameterCache , ProtegeKnowledgeSourceBackend backend ) throws KnowledgeSourceReadException { } }
|
ConnectionManager cm = backend . getConnectionManager ( ) ; Instance relationInstance = ( Instance ) cm . getOwnSlotValue ( pairAbstractionInstance , cm . getSlot ( "withRelation" ) ) ; if ( relationInstance != null ) { Relation relation = Util . instanceToRelation ( relationInstance , cm , backend ) ; Instance lhs = ( Instance ) cm . getOwnSlotValue ( relationInstance , cm . getSlot ( "lhs" ) ) ; assert lhs != null : "lhs cannot be null" ; Instance rhs = ( Instance ) cm . getOwnSlotValue ( relationInstance , cm . getSlot ( "rhs" ) ) ; assert rhs != null : "rhs cannot be null" ; TemporalExtendedPropositionDefinition lhsDefinition = Util . instanceToTemporalExtendedPropositionDefinition ( lhs , backend ) ; extendedParameterCache . put ( lhs , lhsDefinition ) ; TemporalExtendedPropositionDefinition rhsDefinition = Util . instanceToTemporalExtendedPropositionDefinition ( rhs , backend ) ; extendedParameterCache . put ( rhs , rhsDefinition ) ; pd . setFirstTemporalExtendedPropositionDefinition ( lhsDefinition ) ; SubsequentTemporalExtendedPropositionDefinition related = new SubsequentTemporalExtendedPropositionDefinition ( relation , rhsDefinition ) ; pd . setSubsequentTemporalExtendedPropositionDefinitions ( new SubsequentTemporalExtendedPropositionDefinition [ ] { related } ) ; }
|
public class NetworkFilter { /** * { @ inheritDoc } */
@ Override public NetworkFilter or ( NetworkFilter otherFilter ) { } }
|
checkNotNull ( otherFilter , "Other filter must be not a null" ) ; evaluation = new OrEvaluation < > ( evaluation , otherFilter , NetworkMetadata :: getId ) ; return this ;
|
public class AbstractScanPlanNode { /** * When a project node is added to the top of the plan , we need to adjust
* the differentiator field of TVEs to reflect differences in the scan
* schema vs the storage schema of a table , so that fields with duplicate names
* produced by expanding " SELECT * " can resolve correctly .
* We recurse until we find either a join node or a scan node .
* For scan nodes , we need to reflect the difference between the
* storage order of columns produced by a subquery , and the columns
* that are actually projected ( via an inlined project ) from the scan ,
* since unused columns are typically omitted from the output schema
* of the scan .
* @ param tve */
@ Override public void adjustDifferentiatorField ( TupleValueExpression tve ) { } }
|
int storageIndex = tve . getColumnIndex ( ) ; Integer scanIndex = m_differentiatorMap . get ( storageIndex ) ; assert ( scanIndex != null ) ; tve . setDifferentiator ( storageIndex ) ;
|
public class ServletErrorReport { /** * This method determines if the error is initiated by an application or not .
* @ param rootEx the exception being tested
* @ return true if a nice friendly app error should be returned , false otherwise . */
private boolean isApplicationError ( Throwable rootEx , String pkgRoot ) { } }
|
if ( rootEx != null ) { StackTraceElement [ ] stackTrace = rootEx . getStackTrace ( ) ; if ( stackTrace != null && stackTrace . length > 0 ) { StackTraceElement rootThrower = stackTrace [ 0 ] ; String className = rootThrower . getClassName ( ) ; if ( className != null && ! ! ! className . startsWith ( pkgRoot ) ) { return true ; } } } return false ;
|
public class FixedLengthRecordMapper { /** * utility method to calculate field offsets used to extract fields from record . */
private int [ ] calculateOffsets ( final int [ ] lengths ) { } }
|
int [ ] offsets = new int [ lengths . length + 1 ] ; offsets [ 0 ] = 0 ; for ( int i = 0 ; i < lengths . length ; i ++ ) { offsets [ i + 1 ] = offsets [ i ] + lengths [ i ] ; } return offsets ;
|
public class ClassInfoField { /** * Set up the default screen control for this field .
* @ param itsLocation Location of this component on screen ( ie . , GridBagConstraint ) .
* @ param targetScreen Where to place this component ( ie . , Parent screen or GridBagLayout ) .
* @ param converter The converter to set the screenfield to .
* @ param iDisplayFieldDesc Display the label ? ( optional ) .
* @ param properties Extra properties
* @ return Return the component or ScreenField that is created for this field . */
public ScreenComponent setupDefaultView ( ScreenLoc itsLocation , ComponentParent targetScreen , Convert converter , int iDisplayFieldDesc , Map < String , Object > properties ) { } }
|
return this . setupTableLookup ( itsLocation , targetScreen , converter , iDisplayFieldDesc , this . makeReferenceRecord ( ) , ClassInfo . CLASS_NAME_KEY , ClassInfo . CLASS_NAME , true , true ) ;
|
public class JRebirth { /** * Run into the JRebirth Thread Pool [ JTP ] .
* Be careful this method can be called through any thread .
* @ param runnableName the name of the runnable for logging purpose
* @ param runnablePriority the priority to try to apply to the runnable
* @ param runnable the task to run */
public static void runIntoJTP ( final String runnableName , final PriorityLevel runnablePriority , final Runnable runnable ) { } }
|
runIntoJTP ( new JrbReferenceRunnable ( runnableName , runnablePriority , runnable ) ) ;
|
public class AbstractAWSSigner { /** * Returns the time offset in seconds . */
@ Deprecated protected int getTimeOffset ( SignableRequest < ? > request ) { } }
|
final int globleOffset = SDKGlobalTime . getGlobalTimeOffset ( ) ; return globleOffset == 0 ? request . getTimeOffset ( ) : globleOffset ;
|
public class UserBS { /** * Requisição para recuperar usuário .
* @ param token
* Token de identificação do usuário a ser recuperado .
* @ return UserRecoverRequest Requisição realizada . */
public UserRecoverRequest retrieveRecoverRequest ( String token ) { } }
|
UserRecoverRequest req = this . dao . exists ( token , UserRecoverRequest . class ) ; if ( req == null ) return null ; if ( req . getExpiration ( ) . getTime ( ) < System . currentTimeMillis ( ) ) return null ; if ( req . isUsed ( ) ) return null ; return req ;
|
public class ParserTokenStream { /** * Consumes all tokens until the token at the front of the stream is of one of the given types .
* @ param types The types to cause the stream to stop consuming
* @ return The list of tokens that were consumed . */
public List < ParserToken > consumeWhile ( ParserTokenType ... types ) { } }
|
List < ParserToken > tokens = new ArrayList < > ( ) ; while ( isOfType ( lookAheadType ( 0 ) , types ) ) { tokens . add ( consume ( ) ) ; } return tokens ;
|
public class PlatformDefaultImpl { /** * @ see Platform # setObject ( PreparedStatement , int , Object , int ) */
public void setObjectForStatement ( PreparedStatement ps , int index , Object value , int sqlType ) throws SQLException { } }
|
if ( ( sqlType == Types . LONGVARCHAR ) && ( value instanceof String ) ) { String s = ( String ) value ; ps . setCharacterStream ( index , new StringReader ( s ) , s . length ( ) ) ; } /* PATCH for BigDecimal truncation problem . Seems that several databases ( e . g . DB2 , Sybase )
has problem with BigDecimal fields if the sql - type was set . The problem was discussed here
http : / / nagoya . apache . org / eyebrowse / ReadMsg ? listName = ojb - user @ db . apache . org & msgNo = 14113
A better option will be
< snip >
else if ( ( value instanceof BigDecimal ) & & ( sqlType = = Types . DECIMAL
| | sqlType = = Types . NUMERIC ) )
ps . setObject ( index , value , sqlType ,
( ( BigDecimal ) value ) . scale ( ) ) ;
< / snip >
But this way maxDB / sapDB does not work correct , so we use the most flexible solution
and let the jdbc - driver handle BigDecimal objects by itself . */
else if ( sqlType == Types . DECIMAL || sqlType == Types . NUMERIC ) { ps . setObject ( index , value ) ; } else { // arminw : this method call is done very , very often , so we can improve performance
// by comment out this section
// if ( log . isDebugEnabled ( ) ) {
// log . debug ( " Default setObjectForStatement , sqlType = " + sqlType +
// " , value class = " + ( value = = null ? " NULL ! " : value . getClass ( ) . getName ( ) )
// + " , value = " + value ) ;
ps . setObject ( index , value , sqlType ) ; }
|
public class ClassInspector { /** * Gets all fields that are potential ' constants ' .
* @ param aClass the class to work from
* @ return all constants that could be found */
public List < ConstantField > getConstants ( Class < ? > aClass ) { } }
|
List < ConstantField > constants = new ArrayList < > ( ) ; List < Field > fields = Arrays . asList ( aClass . getDeclaredFields ( ) ) ; for ( Field field : fields ) { if ( Modifier . isStatic ( field . getModifiers ( ) ) && ! Modifier . isFinal ( field . getModifiers ( ) ) ) { if ( ! Modifier . isPublic ( field . getModifiers ( ) ) ) if ( tryMakeAccessible ) field . setAccessible ( true ) ; else continue ; constants . add ( processField ( aClass , field ) ) ; } } Iterator < ConstantField > itr = constants . iterator ( ) ; while ( itr . hasNext ( ) ) { if ( itr . next ( ) == null ) itr . remove ( ) ; } return constants ;
|
public class TwoDimensionalCounter { /** * Produces a new ConditionalCounter .
* @ return a new ConditionalCounter , where order of indices is reversed */
@ SuppressWarnings ( { } }
|
"unchecked" } ) public static < K1 , K2 > TwoDimensionalCounter < K2 , K1 > reverseIndexOrder ( TwoDimensionalCounter < K1 , K2 > cc ) { // they typing on the outerMF is violated a bit , but it ' ll work . . . .
TwoDimensionalCounter < K2 , K1 > result = new TwoDimensionalCounter < K2 , K1 > ( ( MapFactory ) cc . outerMF , ( MapFactory ) cc . innerMF ) ; for ( K1 key1 : cc . firstKeySet ( ) ) { ClassicCounter < K2 > c = cc . getCounter ( key1 ) ; for ( K2 key2 : c . keySet ( ) ) { double count = c . getCount ( key2 ) ; result . setCount ( key2 , key1 , count ) ; } } return result ;
|
public class AWSKMSClient { /** * Schedules the deletion of a customer master key ( CMK ) . You may provide a waiting period , specified in days ,
* before deletion occurs . If you do not provide a waiting period , the default period of 30 days is used . When this
* operation is successful , the key state of the CMK changes to < code > PendingDeletion < / code > . Before the waiting
* period ends , you can use < a > CancelKeyDeletion < / a > to cancel the deletion of the CMK . After the waiting period
* ends , AWS KMS deletes the CMK and all AWS KMS data associated with it , including all aliases that refer to it .
* < important >
* Deleting a CMK is a destructive and potentially dangerous operation . When a CMK is deleted , all data that was
* encrypted under the CMK is unrecoverable . To prevent the use of a CMK without deleting it , use < a > DisableKey < / a > .
* < / important >
* If you schedule deletion of a CMK from a < a
* href = " http : / / docs . aws . amazon . com / kms / latest / developerguide / key - store - overview . html " > custom key store < / a > , when
* the waiting period expires , < code > ScheduleKeyDeletion < / code > deletes the CMK from AWS KMS . Then AWS KMS makes a
* best effort to delete the key material from the associated AWS CloudHSM cluster . However , you might need to
* manually < a
* href = " http : / / docs . aws . amazon . com / kms / latest / developerguide / fix - keystore . html # fix - keystore - orphaned - key " > delete
* the orphaned key material < / a > from the cluster and its backups .
* You cannot perform this operation on a CMK in a different AWS account .
* For more information about scheduling a CMK for deletion , see < a
* href = " http : / / docs . aws . amazon . com / kms / latest / developerguide / deleting - keys . html " > Deleting Customer Master Keys < / a >
* in the < i > AWS Key Management Service Developer Guide < / i > .
* The result of this operation varies with the key state of the CMK . For details , see < a
* href = " http : / / docs . aws . amazon . com / kms / latest / developerguide / key - state . html " > How Key State Affects Use of a
* Customer Master Key < / a > in the < i > AWS Key Management Service Developer Guide < / i > .
* @ param scheduleKeyDeletionRequest
* @ return Result of the ScheduleKeyDeletion operation returned by the service .
* @ throws NotFoundException
* The request was rejected because the specified entity or resource could not be found .
* @ throws InvalidArnException
* The request was rejected because a specified ARN was not valid .
* @ throws DependencyTimeoutException
* The system timed out while trying to fulfill the request . The request can be retried .
* @ throws KMSInternalException
* The request was rejected because an internal exception occurred . The request can be retried .
* @ throws KMSInvalidStateException
* The request was rejected because the state of the specified resource is not valid for this request . < / p >
* For more information about how key state affects the use of a CMK , see < a
* href = " http : / / docs . aws . amazon . com / kms / latest / developerguide / key - state . html " > How Key State Affects Use of a
* Customer Master Key < / a > in the < i > AWS Key Management Service Developer Guide < / i > .
* @ sample AWSKMS . ScheduleKeyDeletion
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / kms - 2014-11-01 / ScheduleKeyDeletion " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public ScheduleKeyDeletionResult scheduleKeyDeletion ( ScheduleKeyDeletionRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeScheduleKeyDeletion ( request ) ;
|
public class WriteCommandInstruction { /** * ( non - Javadoc )
* @ see net . roboconf . core . commands . AbstractCommandInstruction # doValidate ( ) */
@ Override public List < ParsingError > doValidate ( ) { } }
|
List < ParsingError > result = new ArrayList < > ( ) ; if ( Utils . isEmptyOrWhitespaces ( this . filePath ) ) result . add ( new ParsingError ( ErrorCode . CMD_MISSING_TARGET_FILE , this . context . getCommandFile ( ) , this . line ) ) ; return result ;
|
public class Schema { /** * Return the class of the implementation type parameter I for this Schema .
* Used by generic code which deals with arbitrary schemas and their backing
* impl classes . Never returns null . */
public Class < I > getImplClass ( ) { } }
|
return _impl_class != null ? _impl_class : ( _impl_class = ReflectionUtils . findActualClassParameter ( this . getClass ( ) , 0 ) ) ;
|
public class CSSStyleDeclarationImpl { /** * Remove a property .
* @ param propertyName the property name
* @ return the removed property
* @ throws DOMException in case of error */
public String removeProperty ( final String propertyName ) throws DOMException { } }
|
if ( null == propertyName ) { return "" ; } for ( int i = 0 ; i < properties_ . size ( ) ; i ++ ) { final Property p = properties_ . get ( i ) ; if ( p != null && propertyName . equalsIgnoreCase ( p . getName ( ) ) ) { properties_ . remove ( i ) ; if ( p . getValue ( ) == null ) { return "" ; } return p . getValue ( ) . toString ( ) ; } } return "" ;
|
public class WebDriverHelper { /** * Waits until an element contains a specific text .
* @ param by
* method of identifying the element
* @ param text
* the element text to wait for
* @ param maximumSeconds
* the maximum number of seconds to wait for */
public void waitForElementToContainSpecificText ( final By by , final String text , final int maximumSeconds ) { } }
|
WebDriverWait wait = new WebDriverWait ( driver , maximumSeconds ) ; wait . until ( ExpectedConditions . textToBePresentInElement ( by , text ) ) ;
|
public class InternalXbaseWithAnnotationsParser { /** * InternalXbaseWithAnnotations . g : 6322:1 : entryRuleJvmWildcardTypeReference returns [ EObject current = null ] : iv _ ruleJvmWildcardTypeReference = ruleJvmWildcardTypeReference EOF ; */
public final EObject entryRuleJvmWildcardTypeReference ( ) throws RecognitionException { } }
|
EObject current = null ; EObject iv_ruleJvmWildcardTypeReference = null ; try { // InternalXbaseWithAnnotations . g : 6322:65 : ( iv _ ruleJvmWildcardTypeReference = ruleJvmWildcardTypeReference EOF )
// InternalXbaseWithAnnotations . g : 6323:2 : iv _ ruleJvmWildcardTypeReference = ruleJvmWildcardTypeReference EOF
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getJvmWildcardTypeReferenceRule ( ) ) ; } pushFollow ( FOLLOW_1 ) ; iv_ruleJvmWildcardTypeReference = ruleJvmWildcardTypeReference ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { current = iv_ruleJvmWildcardTypeReference ; } match ( input , EOF , FOLLOW_2 ) ; if ( state . failed ) return current ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
|
public class DateTimeFormatterBuilder { /** * Appends the text of a date - time field to the formatter .
* The text of the field will be output during a print .
* The value must be within the valid range of the field .
* If the value cannot be obtained then an exception will be thrown .
* If the field has no textual representation , then the numeric value will be used .
* The value will be printed as per the normal print of an integer value .
* Only negative numbers will be signed . No padding will be added .
* @ param field the field to append , not null
* @ param textStyle the text style to use , not null
* @ return this , for chaining , not null */
public DateTimeFormatterBuilder appendText ( TemporalField field , TextStyle textStyle ) { } }
|
Jdk8Methods . requireNonNull ( field , "field" ) ; Jdk8Methods . requireNonNull ( textStyle , "textStyle" ) ; appendInternal ( new TextPrinterParser ( field , textStyle , DateTimeTextProvider . getInstance ( ) ) ) ; return this ;
|
public class TypedStreamReader { /** * Method that allows reading contents of an attribute as an array
* of whitespace - separate tokens , decoded using specified decoder .
* @ return Number of tokens decoded , 0 if none found */
@ Override public int getAttributeAsArray ( int index , TypedArrayDecoder tad ) throws XMLStreamException { } }
|
if ( mCurrToken != START_ELEMENT ) { throw new IllegalStateException ( ErrorConsts . ERR_STATE_NOT_STELEM ) ; } return mAttrCollector . decodeValues ( index , tad , this ) ;
|
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link Item } { @ code > } } */
@ XmlElementDecl ( namespace = "http://schema.intuit.com/finance/v3" , name = "Item" , substitutionHeadNamespace = "http://schema.intuit.com/finance/v3" , substitutionHeadName = "IntuitObject" ) public JAXBElement < Item > createItem ( Item value ) { } }
|
return new JAXBElement < Item > ( _Item_QNAME , Item . class , null , value ) ;
|
public class TagAttributeImpl { /** * Create a ValueExpression , using this attribute ' s literal value and the passed expected type .
* @ see ExpressionFactory # createValueExpression ( javax . el . ELContext , java . lang . String , java . lang . Class )
* @ see ValueExpression
* @ param ctx
* FaceletContext to use
* @ param type
* expected return type
* @ return ValueExpression instance */
public ValueExpression getValueExpression ( FaceletContext ctx , Class type ) { } }
|
AbstractFaceletContext actx = ( AbstractFaceletContext ) ctx ; // volatile reads are atomic , so take the tuple to later comparison .
Object [ ] localCachedExpression = cachedExpression ; if ( actx . isAllowCacheELExpressions ( ) && localCachedExpression != null && localCachedExpression . length == 2 ) { // If the expected type is the same return the cached one
if ( localCachedExpression [ 0 ] == null && type == null ) { // If # { cc } recalculate the composite component level
if ( ( this . capabilities & EL_CC ) != 0 ) { return ( ( LocationValueExpression ) localCachedExpression [ 1 ] ) . apply ( actx . getFaceletCompositionContext ( ) . getCompositeComponentLevel ( ) ) ; } return ( ValueExpression ) localCachedExpression [ 1 ] ; } else if ( localCachedExpression [ 0 ] != null && localCachedExpression [ 0 ] . equals ( type ) ) { // If # { cc } recalculate the composite component level
if ( ( this . capabilities & EL_CC ) != 0 ) { return ( ( LocationValueExpression ) localCachedExpression [ 1 ] ) . apply ( actx . getFaceletCompositionContext ( ) . getCompositeComponentLevel ( ) ) ; } return ( ValueExpression ) localCachedExpression [ 1 ] ; } } actx . beforeConstructELExpression ( ) ; try { ExpressionFactory f = ctx . getExpressionFactory ( ) ; ValueExpression valueExpression = f . createValueExpression ( ctx , this . value , type ) ; if ( ExternalSpecifications . isUnifiedELAvailable ( ) ) { if ( actx . getFaceletCompositionContext ( ) . isWrapTagExceptionsAsContextAware ( ) ) { valueExpression = new ContextAwareTagValueExpressionUEL ( this , valueExpression ) ; } else { valueExpression = new TagValueExpressionUEL ( this , valueExpression ) ; } } else { if ( actx . getFaceletCompositionContext ( ) . isWrapTagExceptionsAsContextAware ( ) ) { valueExpression = new ContextAwareTagValueExpression ( this , valueExpression ) ; } else { valueExpression = new TagValueExpression ( this , valueExpression ) ; } } // if the ValueExpression contains a reference to the current composite
// component , the Location also has to be stored in the ValueExpression
// to be able to resolve the right composite component ( the one that was
// created from the file the Location is pointing to ) later .
// ( see MYFACES - 2561 for details )
if ( ( this . capabilities & EL_CC ) != 0 ) { if ( ExternalSpecifications . isUnifiedELAvailable ( ) ) { valueExpression = new LocationValueExpressionUEL ( getLocation ( ) , valueExpression , actx . getFaceletCompositionContext ( ) . getCompositeComponentLevel ( ) ) ; } else { valueExpression = new LocationValueExpression ( getLocation ( ) , valueExpression , actx . getFaceletCompositionContext ( ) . getCompositeComponentLevel ( ) ) ; } } else if ( ( this . capabilities & EL_RESOURCE ) != 0 ) { if ( ExternalSpecifications . isUnifiedELAvailable ( ) ) { valueExpression = new ResourceLocationValueExpressionUEL ( getLocation ( ) , valueExpression ) ; } else { valueExpression = new ResourceLocationValueExpression ( getLocation ( ) , valueExpression ) ; } } if ( actx . isAllowCacheELExpressions ( ) && ! actx . isAnyFaceletsVariableResolved ( ) ) { cachedExpression = new Object [ ] { type , valueExpression } ; } return valueExpression ; } catch ( Exception e ) { throw new TagAttributeException ( this , e ) ; } finally { actx . afterConstructELExpression ( ) ; }
|
public class Gmap3Dashboard { /** * region > notYetComplete ( derived collection ) */
@ MemberOrder ( sequence = "1" ) @ CollectionLayout ( render = RenderType . EAGERLY ) public List < Gmap3ToDoItem > getNotYetComplete ( ) { } }
|
return gmap3WicketToDoItems . notYetCompleteNoUi ( ) ;
|
public class ConnectedStreams { /** * Applies a CoFlatMap transformation on a { @ link ConnectedStreams } and
* maps the output to a common type . The transformation calls a
* { @ link CoFlatMapFunction # flatMap1 } for each element of the first input
* and { @ link CoFlatMapFunction # flatMap2 } for each element of the second
* input . Each CoFlatMapFunction call returns any number of elements
* including none .
* @ param coFlatMapper
* The CoFlatMapFunction used to jointly transform the two input
* DataStreams
* @ return The transformed { @ link DataStream } */
public < R > SingleOutputStreamOperator < R > flatMap ( CoFlatMapFunction < IN1 , IN2 , R > coFlatMapper ) { } }
|
TypeInformation < R > outTypeInfo = TypeExtractor . getBinaryOperatorReturnType ( coFlatMapper , CoFlatMapFunction . class , 0 , 1 , 2 , TypeExtractor . NO_INDEX , getType1 ( ) , getType2 ( ) , Utils . getCallLocationName ( ) , true ) ; return transform ( "Co-Flat Map" , outTypeInfo , new CoStreamFlatMap < > ( inputStream1 . clean ( coFlatMapper ) ) ) ;
|
public class BufferFieldTable { /** * Move the data source buffer to all the fields .
* < br / > < pre >
* Make sure you do the following steps :
* 1 ) Move the data fields to the correct record data fields , with mode Constants . READ _ MOVE .
* 2 ) Set the data source or set to null , so I can cache the source if necessary .
* 3 ) Save the objectID if it is not an Integer type , so I can serialize the source of this object .
* < / pre >
* Note : This is synchronized because VectorBuffer is not thread safe .
* @ exception Exception File exception .
* @ return Any error encountered moving the data . */
public synchronized int dataToFields ( Rec record ) throws DBException { } }
|
if ( this . getDataSource ( ) == null ) throw new DBException ( Constants . INVALID_RECORD ) ; ( ( BaseBuffer ) this . getDataSource ( ) ) . resetPosition ( ) ; return super . dataToFields ( record ) ;
|
public class PolicySetDefinitionsInner { /** * Retrieves all policy set definitions in management group .
* This operation retrieves a list of all the a policy set definition in the given management group .
* @ param managementGroupId The ID of the management group .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; PolicySetDefinitionInner & gt ; object */
public Observable < Page < PolicySetDefinitionInner > > listByManagementGroupAsync ( final String managementGroupId ) { } }
|
return listByManagementGroupWithServiceResponseAsync ( managementGroupId ) . map ( new Func1 < ServiceResponse < Page < PolicySetDefinitionInner > > , Page < PolicySetDefinitionInner > > ( ) { @ Override public Page < PolicySetDefinitionInner > call ( ServiceResponse < Page < PolicySetDefinitionInner > > response ) { return response . body ( ) ; } } ) ;
|
public class Measure {
    /**
     * Setter for normalizedValue - sets the value into the CAS feature.
     *
     * @generated
     * @param v value to set into the feature
     */
    public void setNormalizedValue(float v) {
        // UIMA JCas generated accessor: verify the feature exists in the type system
        // before writing the raw value through the low-level CAS API.
        if (Measure_Type.featOkTst && ((Measure_Type) jcasType).casFeat_normalizedValue == null)
            jcasType.jcas.throwFeatMissing("normalizedValue", "ch.epfl.bbp.uima.types.Measure");
        jcasType.ll_cas.ll_setFloatValue(addr, ((Measure_Type) jcasType).casFeatCode_normalizedValue, v);
    }
}
|
public class ClientFactory { /** * 删除用户事件
* @ param pUserName */
protected void onRemoveClient ( String pUserName , String pId , String pAddress ) { } }
|
if ( null != mClientFactoryListener ) { mClientFactoryListener . removeClient ( this , new RemoveClientEvent ( pUserName , pId , pAddress ) ) ; }
|
public class ClassifierCombiner {
    /**
     * Some basic testing of the ClassifierCombiner.
     *
     * @param args Command-line arguments as properties: -loadClassifier1 serializedFile -loadClassifier2 serializedFile
     * @throws Exception If IO or serialization error loading classifiers
     */
    public static void main(String[] args) throws Exception {
        Properties props = StringUtils.argsToProperties(args);
        // Build the combiner from the classifiers named in the properties.
        ClassifierCombiner ec = new ClassifierCombiner(props);
        // Smoke test: classify a sample sentence and print the labelled output to stderr.
        System.err.println(ec.classifyToString("Marketing : Sony Hopes to Win Much Bigger Market For Wide Range of Small-Video Products --- By Andrew B. Cohen Staff Reporter of The Wall Street Journal"));
        // Kept for reference: manual test cases for the document-merging logic.
        // test_mergeTwoDocumentsByLongestSequence("OOXOO", "OYYYO");
        // test_mergeTwoDocumentsByLongestSequence("OYYYY", "OOXXX");
        // test_mergeTwoDocumentsByLongestSequence("XXXOZZZZZ", "OYYYYOOOO");
        // test_mergeTwoDocumentsByLongestSequence("OYYYYOOOO", "XXXOZZZZZ");
        // test_mergeTwoDocumentsByLongestSequence("OOOOO", "OXXXO");
        // test_mergeTwoDocumentsByLongestSequence("OXXOOO", "OYYYZO");
        // test_mergeTwoDocumentsByLongestSequence("OYYYZO", "OXXOOO");
        // test_mergeTwoDocumentsByLongestSequence("OYYYOO", "OOXXZO");
        // test_mergeTwoDocumentsByLongestSequence("OYYYOO", "OOXXZZ");
        // test_mergeTwoDocumentsByLongestSequence("OXYZW", "ABBDE");
        // test_mergeTwoDocumentsByLongestSequence("OXOOO", "OOYYO");
        // test_mergeTwoDocumentsByLongestSequence("OOYYO", "OXOOO");
        // test_mergeTwoDocumentsByLongestSequence("OXXOO", "OOYYY");
        // test_mergeTwoDocumentsByLongestSequence("OOYYY", "OXXOO");
    }
}
public class MPP8Reader {
    /**
     * Clear transient member data.
     * Nulls every per-read reference so the reader holds no stale state
     * (and the referenced objects become eligible for garbage collection).
     */
    private void clearMemberData() {
        m_reader = null;
        m_root = null;
        m_eventManager = null;
        m_file = null;
        m_calendarMap = null;
        m_projectDir = null;
        m_viewDir = null;
    }
}
|
public class TransformerFactoryImpl { /** * Create an XMLFilter that uses the given source as the
* transformation instructions .
* @ param src The source of the transformation instructions .
* @ return An XMLFilter object , or null if this feature is not supported .
* @ throws TransformerConfigurationException */
public XMLFilter newXMLFilter ( Source src ) throws TransformerConfigurationException { } }
|
Templates templates = newTemplates ( src ) ; if ( templates == null ) return null ; return newXMLFilter ( templates ) ;
|
public class SortOrderTableHeaderCellRenderer { /** * Returns the sort priority of the specified column in the given
* table , where 0 means the highest priority , and - 1 means that
* the column is not sorted .
* @ param table The table
* @ param column The column
* @ return The sort priority */
private static int getSortPriority ( JTable table , int column ) { } }
|
List < ? extends SortKey > sortKeys = table . getRowSorter ( ) . getSortKeys ( ) ; for ( int i = 0 ; i < sortKeys . size ( ) ; i ++ ) { SortKey sortKey = sortKeys . get ( i ) ; if ( sortKey . getColumn ( ) == table . convertColumnIndexToModel ( column ) ) { return i ; } } return - 1 ;
|
public class UndefinedImpl { /** * returns the scope that contains a specific key
* @ param key
* @ return */
public Collection getScopeFor ( Collection . Key key , Scope defaultValue ) { } }
|
Object rtn = null ; if ( checkArguments ) { rtn = local . get ( key , NullSupportHelper . NULL ( ) ) ; if ( rtn != NullSupportHelper . NULL ( ) ) return local ; rtn = argument . getFunctionArgument ( key , NullSupportHelper . NULL ( ) ) ; if ( rtn != NullSupportHelper . NULL ( ) ) return argument ; } // get data from queries
if ( allowImplicidQueryCall && pc . getCurrentTemplateDialect ( ) == CFMLEngine . DIALECT_CFML && ! qryStack . isEmpty ( ) ) { QueryColumn qc = qryStack . getColumnFromACollection ( key ) ; if ( qc != null ) return ( Query ) qc . getParent ( ) ; } // variable
rtn = variable . get ( key , NullSupportHelper . NULL ( ) ) ; if ( rtn != NullSupportHelper . NULL ( ) ) { return variable ; } // thread scopes
if ( pc . hasFamily ( ) ) { Threads t = ( Threads ) pc . getThreadScope ( key , NullSupportHelper . NULL ( ) ) ; if ( rtn != NullSupportHelper . NULL ( ) ) return t ; } // get a scope value ( only cfml is searcing additional scopes )
if ( pc . getCurrentTemplateDialect ( ) == CFMLEngine . DIALECT_CFML ) { for ( int i = 0 ; i < scopes . length ; i ++ ) { rtn = scopes [ i ] . get ( key , NullSupportHelper . NULL ( ) ) ; if ( rtn != NullSupportHelper . NULL ( ) ) { return scopes [ i ] ; } } } return defaultValue ;
|
public class InferenceSpecification { /** * A list of the instance types that are used to generate inferences in real - time .
* @ param supportedRealtimeInferenceInstanceTypes
* A list of the instance types that are used to generate inferences in real - time .
* @ return Returns a reference to this object so that method calls can be chained together .
* @ see ProductionVariantInstanceType */
public InferenceSpecification withSupportedRealtimeInferenceInstanceTypes ( ProductionVariantInstanceType ... supportedRealtimeInferenceInstanceTypes ) { } }
|
java . util . ArrayList < String > supportedRealtimeInferenceInstanceTypesCopy = new java . util . ArrayList < String > ( supportedRealtimeInferenceInstanceTypes . length ) ; for ( ProductionVariantInstanceType value : supportedRealtimeInferenceInstanceTypes ) { supportedRealtimeInferenceInstanceTypesCopy . add ( value . toString ( ) ) ; } if ( getSupportedRealtimeInferenceInstanceTypes ( ) == null ) { setSupportedRealtimeInferenceInstanceTypes ( supportedRealtimeInferenceInstanceTypesCopy ) ; } else { getSupportedRealtimeInferenceInstanceTypes ( ) . addAll ( supportedRealtimeInferenceInstanceTypesCopy ) ; } return this ;
|
public class Directory {
    /**
     * Triggers the {@link PathChangeListener#modified(PathChangeEvent)} on all listeners specified if the
     * file represented by the path specified has been changed i.e. has a new checksum. If no checksum change
     * has been detected, nothing happens.
     *
     * @param pDispatcher dispatcher used to reach the registered listeners
     * @param pNewRootOrNull new root directory, or {@code null} — passed through to the inform methods
     * @param pFile File which potentially has changed, must not be {@code null}
     * @param pIsCreated {@code true} when the file was newly created (skips the checksum comparison)
     */
    public void informIfChanged(final EventDispatcher pDispatcher, final Directory pNewRootOrNull, final Path pFile, final boolean pIsCreated) {
        // Without listeners there is nobody to inform; skip the checksum work entirely.
        if (pDispatcher.hasListeners()) {
            if (pIsCreated) {
                // Newly created files are reported without a checksum comparison.
                informCreatedOrInitial(pDispatcher, pNewRootOrNull, pFile);
            } else {
                // Re-check the resource; only dispatch when the update reports a real change.
                getResource(pFile).update(getTimeout(), update -> {
                    if (update.hasChanged()) {
                        LOG.debug("Processing {} because {} has been changed", update, pFile);
                        inform(pDispatcher, pNewRootOrNull, pFile);
                    } else {
                        LOG.debug("Ignored {} because {} has not been changed", update, pFile);
                    }
                });
            }
        }
    }
}
|
public class MatchExptScript { /** * Clear datasets , blockers , or learners . */
public void clear ( String what ) { } }
|
if ( what . equals ( "blockers" ) ) blockers . clear ( ) ; else if ( what . equals ( "datasets" ) ) datasets . clear ( ) ; else if ( what . equals ( "learners" ) ) learners . clear ( ) ; else if ( what . equals ( "all" ) ) { clear ( "blockers" ) ; clear ( "datasets" ) ; clear ( "learners" ) ; } else { System . out . println ( "usage: clear blockers|datasets|learners|all" ) ; }
|
public class Script {
    /**
     * Gets the count of P2SH Sig Ops in the Script scriptSig.
     *
     * @param scriptSig the raw scriptSig bytes
     * @return the signature-operation count of the embedded script, or 0 if none found
     * @throws ScriptException if the embedded sub-script cannot be parsed
     */
    public static long getP2SHSigOpCount(byte[] scriptSig) throws ScriptException {
        Script script = new Script();
        try {
            script.parse(scriptSig);
        } catch (ScriptException e) {
            // Ignore errors and count up to the parse-able length
        }
        // Scan backwards for the last data (non-opcode) chunk — presumably the
        // serialized redeem script in a P2SH spend — and count its sig ops.
        for (int i = script.chunks.size() - 1; i >= 0; i--)
            if (!script.chunks.get(i).isOpCode()) {
                Script subScript = new Script();
                subScript.parse(script.chunks.get(i).data);
                return getSigOpCount(subScript.chunks, true);
            }
        return 0;
    }
}
|
public class GraphPath { /** * Remove the path ' s elements after the
* specified one which is starting
* at the specified point . The specified element will
* be removed .
* < p > This function removes after the < i > last occurence < / i >
* of the given object .
* @ param obj is the segment to remove
* @ param pt is the point on which the segment was connected
* as its first point .
* @ return < code > true < / code > on success , otherwise < code > false < / code > */
public boolean removeFromLast ( ST obj , PT pt ) { } }
|
return removeAfter ( lastIndexOf ( obj , pt ) , true ) ;
|
public class FormatterMojo { /** * sha512hash .
* @ param str
* the str
* @ return the string */
private String sha512hash ( String str ) { } }
|
return Hashing . sha512 ( ) . hashBytes ( str . getBytes ( getEncoding ( ) ) ) . toString ( ) ;
|
public class Swagger2MarkupProperties { /** * Return the MarkupLanguage property value associated with the given key , or
* { @ code defaultValue } if the key cannot be resolved .
* @ param key the property name to resolve
* @ return The MarkupLanguage property */
public Optional < MarkupLanguage > getMarkupLanguage ( String key ) { } }
|
Optional < String > property = getString ( key ) ; if ( property . isPresent ( ) ) { return Optional . of ( MarkupLanguage . valueOf ( property . get ( ) ) ) ; } else { return Optional . empty ( ) ; }
|
public class DBInstance { /** * Contains one or more identifiers of DB clusters that are Read Replicas of this DB instance .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setReadReplicaDBClusterIdentifiers ( java . util . Collection ) } or
* { @ link # withReadReplicaDBClusterIdentifiers ( java . util . Collection ) } if you want to override the existing values .
* @ param readReplicaDBClusterIdentifiers
* Contains one or more identifiers of DB clusters that are Read Replicas of this DB instance .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DBInstance withReadReplicaDBClusterIdentifiers ( String ... readReplicaDBClusterIdentifiers ) { } }
|
if ( this . readReplicaDBClusterIdentifiers == null ) { setReadReplicaDBClusterIdentifiers ( new java . util . ArrayList < String > ( readReplicaDBClusterIdentifiers . length ) ) ; } for ( String ele : readReplicaDBClusterIdentifiers ) { this . readReplicaDBClusterIdentifiers . add ( ele ) ; } return this ;
|
public class ApplicationWindowAwareCommand { /** * Returns the { @ link javax . swing . JFrame } of the application window that this command belongs to .
* @ return The control component of the application window , never null . */
protected JFrame getParentWindowControl ( ) { } }
|
// allow subclasses to derive where the application window comes from
final ApplicationWindow applicationWindow = getApplicationWindow ( ) ; if ( applicationWindow == null ) { return ValkyrieRepository . getInstance ( ) . getApplicationConfig ( ) . windowManager ( ) . getActiveWindow ( ) . getControl ( ) ; } return applicationWindow . getControl ( ) ;
|
public class StringUtil { /** * Takes a block of text which might have long lines in it and wraps
* the long lines based on the supplied wrapColumn parameter . It was
* initially implemented for use by VelocityEmail . If there are tabs
* in inString , you are going to get results that are a bit strange ,
* since tabs are a single character but are displayed as 4 or 8
* spaces . Remove the tabs .
* @ param inString Text which is in need of word - wrapping .
* @ param newline The characters that define a newline .
* @ param wrapColumn The column to wrap the words at .
* @ return The text with all the long lines word - wrapped . */
public static String wrapText ( final String inString , final String newline , final int wrapColumn ) { } }
|
if ( inString == null ) { return null ; } final StringTokenizer lineTokenizer = new StringTokenizer ( inString , newline , true ) ; final StringBuilder builder = new StringBuilder ( ) ; while ( lineTokenizer . hasMoreTokens ( ) ) { try { String nextLine = lineTokenizer . nextToken ( ) ; if ( nextLine . length ( ) > wrapColumn ) { // This line is long enough to be wrapped .
nextLine = wrapLine ( nextLine , newline , wrapColumn ) ; } builder . append ( nextLine ) ; } catch ( final NoSuchElementException nsee ) { // thrown by nextToken ( ) , but I don ' t know why it would
break ; } } return builder . toString ( ) ;
|
public class TableRow { /** * Add a span across columns
* @ param colIndex the index of the first column
* @ param n the number of columns in the span */
public void setColumnsSpanned ( final int colIndex , final int n ) { } }
|
if ( n <= 1 ) return ; final TableCell firstCell = this . getOrCreateCell ( colIndex ) ; if ( firstCell . isCovered ( ) ) // already spanned
return ; firstCell . markColumnsSpanned ( n ) ; this . coverRightCells ( colIndex , n ) ;
|
public class Bot { /** * Call this method to set the " Greeting Text " . A user sees this when it opens up the chat window for the
* first time . You can specify different messages for different locales . Therefore , this method receives an
* array of { @ code greeting } .
* See https : / / developers . facebook . com / docs / messenger - platform / discovery / welcome - screen for more .
* @ param greeting an array of Payload consisting of text and locale
* @ return response from facebook */
protected final ResponseEntity < Response > setGreetingText ( Payload [ ] greeting ) { } }
|
Event event = new Event ( ) . setGreeting ( greeting ) ; return restTemplate . postForEntity ( fbMessengerProfileUrl , event , Response . class ) ;
|
public class RebalanceUtils {
    /**
     * Confirms that any nodes from supersetCluster that are in subsetCluster
     * have the same state (i.e., node id, host name, and ports). Specific
     * partitions hosted are not compared.
     *
     * @param subsetCluster cluster whose node ids must all appear in {@code supersetCluster}
     * @param supersetCluster cluster to validate against
     */
    public static void validateClusterNodeState(final Cluster subsetCluster, final Cluster supersetCluster) {
        // First check membership: every subset node id must exist in the superset.
        if (!supersetCluster.getNodeIds().containsAll(subsetCluster.getNodeIds())) {
            throw new VoldemortException("Superset cluster does not contain all nodes from subset cluster[ subset cluster node ids (" + subsetCluster.getNodeIds() + ") are not a subset of superset cluster node ids (" + supersetCluster.getNodeIds() + ") ]");
        }
        // Then pairwise-compare node state for each node of the subset.
        for (Node subsetNode : subsetCluster.getNodes()) {
            Node supersetNode = supersetCluster.getNodeById(subsetNode.getId());
            if (!subsetNode.isEqualState(supersetNode)) {
                throw new VoldemortException("Nodes do not have same state[ subset node state (" + subsetNode.getStateString() + ") not equal to superset node state (" + supersetNode.getStateString() + ") ]");
            }
        }
    }
}
|
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;

class ListMultiplier {
    /**
     * Applies a multiplier to each element in a list.
     *
     * <pre>
     * listMultiplier([1, 2, 3, 4, 5, 6, 7], 3)  -> [3, 6, 9, 12, 15, 18, 21]
     * listMultiplier([1, 2, 3, 4, 5, 6, 7], 4)  -> [4, 8, 12, 16, 20, 24, 28]
     * listMultiplier([1, 2, 3, 4, 5, 6, 7], 10) -> [10, 20, 30, 40, 50, 60, 70]
     * </pre>
     *
     * @param numbers the input values (not modified)
     * @param multiplier the factor applied to every element
     * @return a new list with every element multiplied
     */
    public static List<Integer> listMultiplier(List<Integer> numbers, int multiplier) {
        final List<Integer> scaled = new ArrayList<>(numbers.size());
        for (final int number : numbers) {
            scaled.add(number * multiplier);
        }
        return scaled;
    }
}
|
public class CleaneLingSolver {
    /**
     * Updates the glue value for a given clause.
     * The new glue is computed by marking the frame of every literal in the
     * clause; the clause's glue is only ever lowered, never raised.
     *
     * @param c the clause
     */
    private void updateGlue(final CLClause c) {
        // Glue updating can be disabled entirely by configuration.
        if (!this.config.glueupdate) {
            return;
        }
        // Without glue-based reduction every clause is expected to keep glue 0.
        if (!this.config.gluered) {
            assert c.glue() == 0;
            return;
        }
        assert this.frames.empty();
        // Mark each literal's frame; unmarkFrames() then reports the count of
        // distinct frames touched, which becomes the candidate glue value.
        for (int i = 0; i < c.lits().size(); i++) {
            markFrame(c.lits().get(i));
        }
        final int newGlue = unmarkFrames();
        // Only improvements (strictly smaller glue) are recorded.
        if (newGlue >= c.glue()) {
            return;
        }
        c.setGlue(newGlue);
        this.stats.gluesSum += newGlue;
        this.stats.gluesCount++;
        this.stats.gluesUpdates++;
    }
}
|
public class KeyboardManager {
    /**
     * Called when Swing notifies us that a key has been pressed while the
     * keyboard manager is active.
     *
     * @param e the key event delivered by Swing
     * @return true to swallow the key event (i.e. when a command is bound to the key)
     */
    protected boolean keyPressed(KeyEvent e) {
        logKey("keyPressed", e);
        // get the action command associated with this key
        int keyCode = e.getKeyCode();
        boolean hasCommand = _xlate.hasCommand(keyCode);
        if (hasCommand) {
            // get the info object for this key, creating one if necessary
            KeyInfo info = _keys.get(keyCode);
            if (info == null) {
                info = new KeyInfo(keyCode);
                _keys.put(keyCode, info);
            }
            // remember the last time this key was pressed
            info.setPressTime(RunAnywhere.getWhen(e));
        }
        // notify any key observers of the key press (bound or not)
        notifyObservers(KeyEvent.KEY_PRESSED, e.getKeyCode(), RunAnywhere.getWhen(e));
        // swallow the event only when a command is bound to this key
        return hasCommand;
    }
}
|
public class TextFileAuthentication { /** * Can be used to create entries in the users textfile
* @ param args username and password */
public static void main ( String [ ] args ) { } }
|
if ( args . length < 2 ) { System . err . println ( "Two arguments needed as username and password" ) ; System . exit ( - 1 ) ; } User user = new User ( ) ; user . name = args [ 0 ] ; user . password . setPassword ( args [ 1 ] . toCharArray ( ) ) ; for ( int i = 2 ; i < args . length ; i ++ ) { user . roles . add ( new UserRole ( args [ i ] . trim ( ) ) ) ; } System . out . println ( user . format ( ) ) ;
|
public class WTextField { /** * Set the value of the { @ code autocomplete } attribute for the current field .
* @ param autocompleteValue the value to set as a ( optionally space delimited list of ) String value ( s ) . */
protected void setAutocomplete ( final String autocompleteValue ) { } }
|
final String newValue = Util . empty ( autocompleteValue ) ? null : autocompleteValue ; if ( ! Util . equals ( newValue , getAutocomplete ( ) ) ) { getOrCreateComponentModel ( ) . autocomplete = newValue ; }
|
public class UIClientWebSocket {
    /**
     * When a UI client connects, this method is called. This will immediately send a welcome
     * message to the UI client.
     *
     * @param session the new UI client's session
     */
    @OnOpen
    public void uiClientSessionOpen(Session session) {
        log.infoWsSessionOpened(session.getId(), endpoint);
        // Register the session so later messages can be routed to this client.
        wsEndpoints.getUiClientSessions().addSession(session.getId(), session);
        WelcomeResponse welcomeResponse = new WelcomeResponse();
        // FIXME we should not send the true sessionIds to clients to prevent spoofing.
        welcomeResponse.setSessionId(session.getId());
        try {
            new WebSocketHelper().sendBasicMessageSync(session, welcomeResponse);
        } catch (IOException e) {
            // Failure to deliver the welcome is logged but does not abort the connection.
            log.warnf(e, "Could not send [%s] to UI client session [%s].", WelcomeResponse.class.getName(), session.getId());
        }
    }
}
|
public class CmsLockManager { /** * Removes all locks of a user . < p >
* Edition and system locks are removed . < p >
* @ param userId the id of the user whose locks should be removed */
public void removeLocks ( CmsUUID userId ) { } }
|
Iterator < CmsLock > itLocks = OpenCms . getMemoryMonitor ( ) . getAllCachedLocks ( ) . iterator ( ) ; while ( itLocks . hasNext ( ) ) { CmsLock currentLock = itLocks . next ( ) ; boolean editLock = currentLock . getEditionLock ( ) . getUserId ( ) . equals ( userId ) ; boolean sysLock = currentLock . getSystemLock ( ) . getUserId ( ) . equals ( userId ) ; if ( editLock ) { unlockResource ( currentLock . getResourceName ( ) , false ) ; } if ( sysLock ) { unlockResource ( currentLock . getResourceName ( ) , true ) ; } }
|
public class GitlabAPI { /** * GET / projects / : id / repository / commits / : sha / diff */
public List < GitlabCommitDiff > getCommitDiffs ( Serializable projectId , String commitHash ) throws IOException { } }
|
return getCommitDiffs ( projectId , commitHash , new Pagination ( ) ) ;
|
public class DolphinPlatformApplication { /** * This method is called if the connection to the Dolphin Platform server throws an exception at runtime . This can
* for example happen if the server is shut down while the client is still running or if the server responses with
* an error code .
* @ param primaryStage the primary stage
* @ param runtimeException the exception */
protected void onRuntimeError ( final Stage primaryStage , final DolphinRuntimeException runtimeException ) { } }
|
Assert . requireNonNull ( runtimeException , "runtimeException" ) ; LOG . error ( "Dolphin Platform runtime error in thread " + runtimeException . getThread ( ) . getName ( ) , runtimeException ) ; Platform . exit ( ) ;
|
public class SimpleNamingStrategy { /** * Turns the name into a valid , simplified Java Identifier .
* @ param name input String
* @ return java identifier , based on input String . */
private String toJavaName ( String name ) { } }
|
StringBuilder stb = new StringBuilder ( ) ; char [ ] namechars = name . toCharArray ( ) ; if ( ! Character . isJavaIdentifierStart ( namechars [ 0 ] ) ) { stb . append ( "__" ) ; } else { stb . append ( namechars [ 0 ] ) ; } for ( int i = 1 ; i < namechars . length ; i ++ ) { if ( ! Character . isJavaIdentifierPart ( namechars [ i ] ) ) { stb . append ( "__" ) ; } else { stb . append ( namechars [ i ] ) ; } } return stb . toString ( ) ;
|
public class TopLevelGedDocumentMongoToGedObjectVisitor {
    /** {@inheritDoc} */
    @Override
    public final void visit(final SourceDocumentMongo document) {
        // Convert the Mongo source document into a Source object under the current
        // parent; document.getString() supplies the value for the new ObjectId.
        gedObject = new Source(parent, new ObjectId(document.getString()));
    }
}
|
public class PrivateZonesInner { /** * Updates a Private DNS zone . Does not modify virtual network links or DNS records within the zone .
* @ param resourceGroupName The name of the resource group .
* @ param privateZoneName The name of the Private DNS zone ( without a terminating dot ) .
* @ param parameters Parameters supplied to the Update operation .
* @ param ifMatch The ETag of the Private DNS zone . Omit this value to always overwrite the current zone . Specify the last - seen ETag value to prevent accidentally overwriting any concurrent changes .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the PrivateZoneInner object if successful . */
public PrivateZoneInner update ( String resourceGroupName , String privateZoneName , PrivateZoneInner parameters , String ifMatch ) { } }
|
return updateWithServiceResponseAsync ( resourceGroupName , privateZoneName , parameters , ifMatch ) . toBlocking ( ) . last ( ) . body ( ) ;
|
public class Task { /** * Executes the task in the given { @ link ComThread }
* @ param t the ComThread to execute the task
* @ return the return value of the Task execution ( returned by { @ link # call ( ) } ) . */
public final T execute ( ComThread t ) { } }
|
if ( Thread . currentThread ( ) == t ) // if invoked from within ComThread , execute it at once
return call ( ) ; else // otherwise schedule the execution and block
return t . execute ( this ) ;
|
public class GrassLegacyUtilities { /** * Returns the list of files involved in the raster map issues . If for example a map has to be
* deleted , then all these files have to .
* @ param mapsetPath - the path of the mapset
* @ param mapname - the name of the map
* @ return the array of strings containing the full path to the involved files */
public static boolean checkRasterMapConsistence ( String mapsetPath , String mapname ) { } }
|
File file = null ; File file2 = null ; file = new File ( mapsetPath + File . separator + GrassLegacyConstans . FCELL + File . separator + mapname ) ; file2 = new File ( mapsetPath + File . separator + GrassLegacyConstans . CELL + File . separator + mapname ) ; // the map is in one of the two
if ( ! file . exists ( ) && ! file2 . exists ( ) ) return false ; /* * helper files */
file = new File ( mapsetPath + File . separator + GrassLegacyConstans . CELLHD + File . separator + mapname ) ; if ( ! file . exists ( ) ) return false ; // it is important that the folder cell _ misc / mapname comes before the
// files in it
file = new File ( mapsetPath + File . separator + GrassLegacyConstans . CELL_MISC + File . separator + mapname ) ; if ( ! file . exists ( ) ) return false ; return true ;
|
public class FieldsAndGetters {
    /**
     * Dumps all fields and getters of {@code obj} to {@code printer}.
     * Equivalent to calling {@link #dumpIf} with predicates that accept everything.
     *
     * @param name label used for the dump output
     * @param obj the object whose fields and getters are dumped
     * @param printer destination for the dump
     * @see #dumpIf
     */
    public static void dumpAll(String name, Object obj, StringPrinter printer) {
        dumpIf(name, obj, Predicates.alwaysTrue(), Predicates.alwaysTrue(), printer);
    }
}
|
public class WorkSheet {
    /**
     * Combine two work sheets by joining on rows. Rows that are found in one but
     * not the other are removed. If the second sheet is meta data then a meta data
     * column will be added between the two joined columns.
     *
     * @param w1 first sheet; its rows define one side of the intersection
     * @param w2 second sheet; only rows also present in {@code w1} are kept
     * @param secondSheetMetaData whether {@code w2} should be treated as meta data
     * @return a new WorkSheet containing the intersected rows and the combined columns
     * @throws Exception propagated from WorkSheet construction/cell access
     */
    static public WorkSheet unionWorkSheetsRowJoin(WorkSheet w1, WorkSheet w2, boolean secondSheetMetaData) throws Exception {
        ArrayList<String> w1Columns = w1.getColumns();
        ArrayList<String> w2Columns = w2.getColumns();
        ArrayList<String> w1DataColumns = w1.getDataColumns();
        ArrayList<String> w2DataColumns = w2.getDataColumns();
        ArrayList<String> w1MetaDataColumns = w1.getMetaDataColumns();
        ArrayList<String> w2MetaDataColumns = w2.getMetaDataColumns();
        // When the second sheet is meta data, ensure the META_DATA separator column
        // follows w1's data columns (unless w1 already has one).
        if (secondSheetMetaData) {
            if (!w1.getColumns().contains("META_DATA")) {
                w1DataColumns.add("META_DATA");
            }
        }
        // Assemble the joined column list: data columns first, then a META_DATA
        // separator if any meta data exists, then the unique meta data columns.
        ArrayList<String> joinedColumns = new ArrayList<String>();
        joinedColumns.addAll(w1DataColumns);
        joinedColumns.addAll(w2DataColumns);
        if (!joinedColumns.contains("META_DATA") && (w1MetaDataColumns.size() > 0 || w2MetaDataColumns.size() > 0)) {
            joinedColumns.add("META_DATA");
        }
        for (String column : w1MetaDataColumns) {
            if (!joinedColumns.contains(column)) {
                joinedColumns.add(column);
            }
        }
        for (String column : w2MetaDataColumns) {
            if (!joinedColumns.contains(column)) {
                joinedColumns.add(column);
            }
        }
        // Keep only the rows that appear in both sheets (intersection, in w2 order).
        ArrayList<String> w1Rows = w1.getRows();
        ArrayList<String> w2Rows = w2.getRows();
        ArrayList<String> rows = new ArrayList<String>();
        HashSet<String> w1Key = new HashSet<String>(w1Rows);
        for (String key : w2Rows) {
            if (w1Key.contains(key)) {
                rows.add(key);
            }
        }
        WorkSheet worksheet = new WorkSheet(rows, joinedColumns);
        // Copy cell values from w1, skipping the META_DATA separator column.
        for (String row : rows) {
            for (String column : w1Columns) {
                if (column.equals("META_DATA")) {
                    continue;
                }
                String value = w1.getCell(row, column);
                worksheet.addCell(row, column, value);
            }
        }
        // Then copy cell values from w2 the same way.
        for (String row : rows) {
            for (String column : w2Columns) {
                if (column.equals("META_DATA")) {
                    continue;
                }
                String value = w2.getCell(row, column);
                worksheet.addCell(row, column, value);
            }
        }
        // Recompute the meta data boundaries on the combined sheet.
        worksheet.setMetaDataColumnsAfterColumn();
        worksheet.setMetaDataRowsAfterRow();
        return worksheet;
    }
}
|
public class ButterKnifeProcessor { /** * Returns the first duplicate element inside an array , null if there are no duplicates . */
private static @ Nullable Integer findDuplicate ( int [ ] array ) { } }
|
Set < Integer > seenElements = new LinkedHashSet < > ( ) ; for ( int element : array ) { if ( ! seenElements . add ( element ) ) { return element ; } } return null ;
|
public class CPSpecificationOptionPersistenceImpl { /** * Returns the cp specification option where groupId = & # 63 ; and key = & # 63 ; or throws a { @ link NoSuchCPSpecificationOptionException } if it could not be found .
* @ param groupId the group ID
* @ param key the key
* @ return the matching cp specification option
* @ throws NoSuchCPSpecificationOptionException if a matching cp specification option could not be found */
@ Override public CPSpecificationOption findByG_K ( long groupId , String key ) throws NoSuchCPSpecificationOptionException { } }
|
CPSpecificationOption cpSpecificationOption = fetchByG_K ( groupId , key ) ; if ( cpSpecificationOption == null ) { StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "groupId=" ) ; msg . append ( groupId ) ; msg . append ( ", key=" ) ; msg . append ( key ) ; msg . append ( "}" ) ; if ( _log . isDebugEnabled ( ) ) { _log . debug ( msg . toString ( ) ) ; } throw new NoSuchCPSpecificationOptionException ( msg . toString ( ) ) ; } return cpSpecificationOption ;
|
public class UriEscape { /** * Perform am URI query parameter ( name or value ) < strong > unescape < / strong > operation
* on a < tt > Reader < / tt > input , writing results to a < tt > Writer < / tt > .
* This method will unescape every percent - encoded ( < tt > % HH < / tt > ) sequences present in input ,
* even for those characters that do not need to be percent - encoded in this context ( unreserved characters
* can be percent - encoded even if / when this is not required , though it is not generally considered a
* good practice ) .
* This method will use specified < tt > encoding < / tt > in order to determine the characters specified in the
* percent - encoded byte sequences .
* This method is < strong > thread - safe < / strong > .
* @ param reader the < tt > Reader < / tt > reading the text to be unescaped .
* @ param writer the < tt > java . io . Writer < / tt > to which the unescaped result will be written . Nothing will
* be written at all to this writer if input is < tt > null < / tt > .
* @ param encoding the encoding to be used for unescaping .
* @ throws IOException if an input / output exception occurs
* @ since 1.1.2 */
public static void unescapeUriQueryParam ( final Reader reader , final Writer writer , final String encoding ) throws IOException { } }
|
if ( writer == null ) { throw new IllegalArgumentException ( "Argument 'writer' cannot be null" ) ; } if ( encoding == null ) { throw new IllegalArgumentException ( "Argument 'encoding' cannot be null" ) ; } UriEscapeUtil . unescape ( reader , writer , UriEscapeUtil . UriEscapeType . QUERY_PARAM , encoding ) ;
|
public class RequestBaratineImpl { /** * Starts an upgrade of the HTTP request to a protocol on raw TCP . */
@ Override public void upgrade ( Object protocol ) { } }
|
Objects . requireNonNull ( protocol ) ; if ( protocol instanceof ServiceWebSocket ) { ServiceWebSocket < ? , ? > webSocket = ( ServiceWebSocket < ? , ? > ) protocol ; upgradeWebSocket ( webSocket ) ; } else { throw new IllegalArgumentException ( protocol . toString ( ) ) ; }
|
public class WaitFor { /** * Waits up to the provided wait time for a prompt present on the page has content equal to the
* expected text . This information will be logged and recorded , with a
* screenshot for traceability and added debugging support .
* @ param expectedPromptText the expected text of the prompt
* @ param seconds the number of seconds to wait */
public void promptEquals ( double seconds , String expectedPromptText ) { } }
|
try { double timeTook = popup ( seconds ) ; timeTook = popupEquals ( seconds - timeTook , expectedPromptText ) ; checkPromptEquals ( expectedPromptText , seconds , timeTook ) ; } catch ( TimeoutException e ) { checkPromptEquals ( expectedPromptText , seconds , seconds ) ; }
|
public class BinaryNumberIncrementor { /** * Process specified entry and return the result .
* @ param entry entry to process
* @ return processing result */
public Object process ( BinaryEntry entry ) { } }
|
Binary binValue = entry . getBinaryValue ( ) ; if ( binValue == null ) { return binValue ; } PofValue pofValue = getPofValue ( binValue ) ; Number oldValue = ( Number ) get ( pofValue ) ; if ( oldValue == null ) { oldValue = Numbers . getDefaultValue ( numInc . getClass ( ) ) ; } Number newValue = Numbers . add ( oldValue , numInc ) ; set ( pofValue , newValue ) ; entry . updateBinaryValue ( pofValue . applyChanges ( ) ) ; return fPostInc ? newValue : oldValue ;
|
public class AAFAuthorizer { /** * Check remoted AAF Permissions
* @ param aau
* @ param type
* @ param instance
* @ return */
private Set < Permission > checkPermissions ( AAFAuthenticatedUser aau , String type , String instance ) { } }
|
// Can perform ALL actions
String fullName = aau . getFullName ( ) ; PermHolder ph = new PermHolder ( aau ) ; aafLur . fishOneOf ( fullName , ph , type , instance , actions ) ; return ph . permissions ;
|
public class ServiceLoaderHelper { /** * Uses the { @ link ServiceLoader } to load all SPI implementations of the
* passed class and return only the first instance .
* @ param < T >
* The implementation type to be loaded
* @ param aSPIClass
* The SPI interface class . May not be < code > null < / code > .
* @ param aClassLoader
* The class loader to use for the SPI loader . May not be
* < code > null < / code > .
* @ return A collection of all currently available plugins . Never
* < code > null < / code > . */
@ Nullable public static < T > T getFirstSPIImplementation ( @ Nonnull final Class < T > aSPIClass , @ Nonnull final ClassLoader aClassLoader ) { } }
|
return getFirstSPIImplementation ( aSPIClass , aClassLoader , null ) ;
|
public class LogLogisticDistribution { /** * Cumulative density function .
* @ param val Value
* @ param shape Shape
* @ param location Location
* @ param scale Scale
* @ return CDF */
public static double cdf ( double val , double shape , double location , double scale ) { } }
|
if ( val < location ) { return 0 ; } val = ( val - location ) / scale ; return 1. / ( 1. + FastMath . pow ( val , - shape ) ) ;
|
public class CassandraSpanStore {

    /**
     * This fans out into a number of requests corresponding to query input. In simplest case, there
     * is less than a day of data queried, and only one expression. This implies one call to fetch
     * trace IDs and another to retrieve the span details.
     *
     * <p>The amount of backend calls increase in dimensions of query complexity, days of data, and
     * limit of traces requested. For example, a query like "http.path=/foo and error" will be two
     * select statements for the expression, possibly follow-up calls for pagination (when over 5K
     * rows match). Once IDs are parsed, there's one call for each 5K rows of span data. This means
     * "http.path=/foo and error" is minimally 3 network calls, the first two in parallel.
     */
    @Override
    public Call<List<List<Span>>> getTraces(QueryRequest request) {
        // Search may be disabled entirely; in that case never touch the backend.
        if (!searchEnabled) return Call.emptyList();

        TimestampRange timestampRange = timestampRange(request);
        // If we have to make multiple queries, over fetch on indexes as they don't return distinct
        // (trace id, timestamp) rows. This mitigates intersection resulting in < limit traces
        final int traceIndexFetchSize = request.limit() * indexFetchMultiplier;
        List<Call<Map<String, Long>>> callsToIntersect = new ArrayList<>();

        // One indexed lookup per annotation expression; all results are intersected below
        // to implement AND semantics.
        List<String> annotationKeys = CassandraUtil.annotationKeys(request);
        for (String annotationKey : annotationKeys) {
            if (spanTable == null) {
                throw new IllegalArgumentException(request.annotationQueryString()
                    + " query unsupported due to missing annotation_query index");
            }
            callsToIntersect.add(
                spanTable.newCall(request.serviceName(), annotationKey, timestampRange, traceIndexFetchSize));
        }

        // Bucketed calls can be expensive when service name isn't specified. This guards against abuse.
        if (request.remoteServiceName() != null
            || request.spanName() != null
            || request.minDuration() != null
            || callsToIntersect.isEmpty()) {
            callsToIntersect.add(newBucketedTraceIdCall(request, timestampRange, traceIndexFetchSize));
        }

        // Single source: skip intersection and keep the timestamp-descending sort.
        if (callsToIntersect.size() == 1) {
            return callsToIntersect.get(0)
                .map(traceIdsSortedByDescTimestamp())
                .flatMap(spans.newFlatMapper(request));
        }

        // We achieve the AND goal, by intersecting each of the key sets.
        IntersectKeySets intersectedTraceIds = new IntersectKeySets(callsToIntersect);
        // @xxx the sorting by timestamp desc is broken here ^
        return intersectedTraceIds.flatMap(spans.newFlatMapper(request));
    }
}
|
public class LinkedOptionalMap { /** * Assuming all the entries of this map are present ( keys and values ) this method would return
* a map with these key and values , stripped from their Optional wrappers .
* NOTE : please note that if any of the key or values are absent this method would throw an { @ link IllegalStateException } . */
public LinkedHashMap < K , V > unwrapOptionals ( ) { } }
|
final LinkedHashMap < K , V > unwrapped = new LinkedHashMap < > ( underlyingMap . size ( ) ) ; for ( Entry < String , KeyValue < K , V > > entry : underlyingMap . entrySet ( ) ) { String namedKey = entry . getKey ( ) ; KeyValue < K , V > kv = entry . getValue ( ) ; if ( kv . key == null ) { throw new IllegalStateException ( "Missing key '" + namedKey + "'" ) ; } if ( kv . value == null ) { throw new IllegalStateException ( "Missing value for the key '" + namedKey + "'" ) ; } unwrapped . put ( kv . key , kv . value ) ; } return unwrapped ;
|
public class WSMessage { /** * Adds additional payload data .
* @ param additionalPayload */
public void addPayload ( IoBuffer additionalPayload ) { } }
|
if ( payload == null ) { payload = IoBuffer . allocate ( additionalPayload . remaining ( ) ) ; payload . setAutoExpand ( true ) ; } this . payload . put ( additionalPayload ) ;
|
public class Interface { /** * Use this API to reset Interface . */
public static base_response reset ( nitro_service client , Interface resource ) throws Exception { } }
|
Interface resetresource = new Interface ( ) ; resetresource . id = resource . id ; return resetresource . perform_operation ( client , "reset" ) ;
|
public class ISPNCacheWorkspaceStorageCache {

    /**
     * {@inheritDoc}
     *
     * <p>Stores the given item in the cache. Null placeholders take a separate fast path;
     * real items are written inside a cache transaction — joining the active one if present,
     * otherwise opening (and committing) a dedicated one.
     */
    public void put(ItemData item) {
        // There is different commit processing for NullNodeData and ordinary ItemData.
        if (item instanceof NullItemData) {
            putNullItem((NullItemData) item);
            return;
        }

        // Join an already-active transaction, or run the write in our own one.
        boolean inTransaction = cache.isTransactionActive();
        try {
            if (!inTransaction) {
                cache.beginTransaction();
            }
            // Mark the operation as local for the duration of the write.
            cache.setLocal(true);
            if (item.isNode()) {
                putNode((NodeData) item, ModifyChildOption.NOT_MODIFY);
            } else {
                putProperty((PropertyData) item, ModifyChildOption.NOT_MODIFY);
            }
        } finally {
            // Always clear the local flag; commit only the transaction we opened ourselves.
            cache.setLocal(false);
            if (!inTransaction) {
                dedicatedTxCommit();
            }
        }
    }
}
|
public class CmsChangedResourceCollector {

    /**
     * Returns a List of all changed resources in the folder pointed to by the parameter
     * sorted by the date attributes specified in the parameter.<p>
     *
     * @param cms the current CmsObject
     * @param param must contain an extended collector parameter set as described by {@link CmsExtendedCollectorData}
     * @param tree if true, look in folder and all child folders, if false, look only in given folder
     * @param asc if <code>true</code>, the sort is ascending (old dates first), otherwise it is descending
     *            (new dates first)
     * @param numResults number of results
     * @return a List of all resources in the folder pointed to by the parameter sorted by the selected dates
     * @throws CmsException if something goes wrong
     */
    protected List<CmsResource> allChangedInFolderDate(CmsObject cms, String param, boolean tree, boolean asc, int numResults)
        throws CmsException {

        Map<String, String> params = getParameters(param);

        // Folder to collect from; defaults to the site root when no resource param is given.
        String foldername = "/";
        if (params.containsKey(PARAM_KEY_RESOURCE)) {
            foldername = CmsResource.getFolderPath(params.get(PARAM_KEY_RESOURCE));
        }

        // Optional modification-date window; defaults cover all time.
        long dateFrom = 0L;
        long dateTo = Long.MAX_VALUE;
        if (params.containsKey(PARAM_KEY_DATEFROM)) {
            try {
                dateFrom = Long.parseLong(params.get(PARAM_KEY_DATEFROM));
            } catch (NumberFormatException e) {
                // error parsing from date
                LOG.error(Messages.get().getBundle().key(
                    Messages.ERR_COLLECTOR_PARAM_INVALID_1,
                    PARAM_KEY_DATEFROM + "=" + params.get(PARAM_KEY_DATEFROM)));
                throw e;
            }
        }
        if (params.containsKey(PARAM_KEY_DATETO)) {
            try {
                dateTo = Long.parseLong(params.get(PARAM_KEY_DATETO));
            } catch (NumberFormatException e) {
                // error parsing to date
                LOG.error(Messages.get().getBundle().key(
                    Messages.ERR_COLLECTOR_PARAM_INVALID_1,
                    PARAM_KEY_DATETO + "=" + params.get(PARAM_KEY_DATETO)));
                throw e;
            }
        }

        // create the filter to read the resources
        CmsResourceFilter filter = CmsResourceFilter.DEFAULT_FILES.addExcludeFlags(CmsResource.FLAG_TEMPFILE)
            .addRequireLastModifiedAfter(dateFrom)
            .addRequireLastModifiedBefore(dateTo);

        // check if a resource type has to be excluded
        if (params.containsKey(PARAM_KEY_EXCLUDETYPE)) {
            String excludeType = params.get(PARAM_KEY_EXCLUDETYPE);
            int typeId = -1;
            try {
                // try to look up the resource type
                I_CmsResourceType resourceType = OpenCms.getResourceManager().getResourceType(excludeType);
                typeId = resourceType.getTypeId();
            } catch (CmsLoaderException e1) {
                // maybe the int ID is directly used?
                try {
                    int typeInt = Integer.parseInt(excludeType);
                    I_CmsResourceType resourceType = OpenCms.getResourceManager().getResourceType(typeInt);
                    typeId = resourceType.getTypeId();
                    if (LOG.isWarnEnabled()) {
                        // NOTE(review): new Integer(...) is a deprecated boxing constructor;
                        // Integer.valueOf would be preferred — kept as-is here.
                        LOG.warn(Messages.get().getBundle().key(
                            Messages.LOG_RESTYPE_INTID_2,
                            resourceType.getTypeName(),
                            new Integer(resourceType.getTypeId())));
                    }
                } catch (NumberFormatException e2) {
                    // bad number format used for type
                    throw new CmsRuntimeException(Messages.get().container(
                        Messages.ERR_COLLECTOR_PARAM_INVALID_1,
                        PARAM_KEY_EXCLUDETYPE + "=" + params.get(PARAM_KEY_EXCLUDETYPE)), e2);
                } catch (CmsLoaderException e2) {
                    // this resource type does not exist
                    throw new CmsRuntimeException(
                        Messages.get().container(Messages.ERR_UNKNOWN_RESTYPE_1, excludeType), e2);
                }
            }
            if (typeId != -1) {
                filter = filter.addExcludeType(typeId);
            }
        }

        // read the resources using the configured filter
        List<CmsResource> result = cms.readResources(foldername, filter, tree);

        // determine which attribute should be used to sort the result
        String sortBy = CmsDateResourceComparator.DATE_ATTRIBUTES_LIST.get(1);
        if (params.containsKey(PARAM_KEY_SORTBY)) {
            sortBy = params.get(PARAM_KEY_SORTBY);
        }
        List<String> dateIdentifiers = new ArrayList<String>(1);
        dateIdentifiers.add(sortBy);

        // a special date comparator is used to sort the resources
        CmsDateResourceComparator comparator = new CmsDateResourceComparator(cms, dateIdentifiers, asc);
        Collections.sort(result, comparator);

        // Optional result-count cap from the collector parameters.
        int count = -1;
        if (params.containsKey(PARAM_KEY_COUNT)) {
            try {
                count = Integer.parseInt(params.get(PARAM_KEY_COUNT));
            } catch (NumberFormatException e) {
                // error parsing the count
                LOG.error(Messages.get().getBundle().key(
                    Messages.ERR_COLLECTOR_PARAM_INVALID_1,
                    PARAM_KEY_COUNT + "=" + params.get(PARAM_KEY_COUNT)));
                throw e;
            }
        }

        // Trim the sorted list when either cap is in effect; otherwise return everything.
        if ((count > 0) || (numResults > 0)) {
            return shrinkToFit(result, count, numResults);
        } else {
            return result;
        }
    }
}
|
public class Connector { /** * This method will try to make a simple tcp connection to the asterisk
* manager to establish it is up . We do this as the default makeConnection
* doesn ' t have a timeout and will sit trying to connect for a minute or so .
* By using method when the user realises they have a problem on start up
* they can go to the asterisk panel . Fix the problem and then we can retry
* with the new connection settings within a couple of seconds rather than
* waiting a minute for a timeout .
* @ param asteriskSettings
* @ throws UnknownHostException
* @ throws IOException */
private void checkIfAsteriskRunning ( AsteriskSettings asteriskSettings ) throws UnknownHostException , IOException { } }
|
try ( Socket socket = new Socket ( ) ) { socket . setSoTimeout ( 2000 ) ; InetSocketAddress asteriskHost = new InetSocketAddress ( asteriskSettings . getAsteriskIP ( ) , asteriskSettings . getManagerPortNo ( ) ) ; socket . connect ( asteriskHost , 2000 ) ; }
|
public class ExtensionList { /** * Loads all the extensions . */
protected List < ExtensionComponent < T > > load ( ) { } }
|
LOGGER . fine ( ( ) -> String . format ( "Loading ExtensionList '%s'" , extensionType . getName ( ) ) ) ; if ( LOGGER . isLoggable ( Level . FINER ) ) { LOGGER . log ( Level . FINER , String . format ( "Loading ExtensionList '%s' from" , extensionType . getName ( ) ) , new Throwable ( "Only present for stacktrace information" ) ) ; } return jenkins . getPluginManager ( ) . getPluginStrategy ( ) . findComponents ( extensionType , hudson ) ;
|
public class FormInterceptor { /** * Override paint in order to perform processing specific to this interceptor .
* @ param renderContext the renderContext to send the output to . */
@ Override public void paint ( final RenderContext renderContext ) { } }
|
getBackingComponent ( ) . paint ( renderContext ) ; // We don ' t want to remember the focus for the next render because on
// a multi portlet page , we ' d end up with multiple portlets trying to
// set the focus .
UIContext uic = UIContextHolder . getCurrent ( ) ; if ( uic . isFocusRequired ( ) ) { boolean sticky = ConfigurationProperties . getStickyFocus ( ) ; if ( ! sticky ) { uic . setFocussed ( null , null ) ; uic . setFocusRequired ( false ) ; } }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.