signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class GetRelationalDatabaseSnapshotRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( GetRelationalDatabaseSnapshotRequest getRelationalDatabaseSnapshotRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( getRelationalDatabaseSnapshotRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getRelationalDatabaseSnapshotRequest . getRelationalDatabaseSnapshotName ( ) , RELATIONALDATABASESNAPSHOTNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Convert { /** * 给定字符串转换字符编码 < br > * 如果参数为空 , 则返回原字符串 , 不报错 。 * @ param str 被转码的字符串 * @ param sourceCharset 原字符集 * @ param destCharset 目标字符集 * @ return 转换后的字符串 * @ see CharsetUtil # convert ( String , String , String ) */ public static String convertCharset ( String str , String sourceCharset , String destCharset ) { } }
if ( StrUtil . hasBlank ( str , sourceCharset , destCharset ) ) { return str ; } return CharsetUtil . convert ( str , sourceCharset , destCharset ) ;
public class GradientToEdgeFeatures { /** * Sets edge intensities to zero if the pixel has an intensity which is less than either of * the two adjacent pixels . Pixel adjacency is determined by the gradients discretized direction . * @ param intensity Edge intensities . Not modified . * @ param direction 8 - Discretized direction . See { @ link # discretizeDirection8 ( GrayF32 , GrayS8 ) } . Not modified . * @ param output Filtered intensity . If null a new image will be declared and returned . Modified . * @ return Filtered edge intensity . */ static public GrayF32 nonMaxSuppression8 ( GrayF32 intensity , GrayS8 direction , GrayF32 output ) { } }
InputSanityCheck . checkSameShape ( intensity , direction ) ; output = InputSanityCheck . checkDeclare ( intensity , output ) ; if ( BoofConcurrency . USE_CONCURRENT ) { ImplEdgeNonMaxSuppression_MT . inner8 ( intensity , direction , output ) ; ImplEdgeNonMaxSuppression_MT . border8 ( intensity , direction , output ) ; } else { ImplEdgeNonMaxSuppression . inner8 ( intensity , direction , output ) ; ImplEdgeNonMaxSuppression . border8 ( intensity , direction , output ) ; } return output ;
public class Convert { /** * Convert c . * @ param < T > the type parameter * @ param < C > the type parameter * @ param object the object * @ param collectionClass the collection class * @ param componentClass the component class * @ return the c */ @ SuppressWarnings ( "unchecked" ) public static < T , C extends Collection < T > > C convert ( Object object , Class < ? > collectionClass , Class < T > componentClass ) { } }
if ( collectionClass == null || ! Collection . class . isAssignableFrom ( collectionClass ) ) { log . fine ( "{0} does not extend collection." , collectionClass ) ; return null ; } return Cast . as ( CollectionConverter . COLLECTION ( Cast . < Class < C > > as ( collectionClass ) , componentClass ) . apply ( object ) ) ;
public class Description { /** * Returns the byte representation of this description structure . * @ return byte array containing structure data */ public byte [ ] toByteArray ( ) { } }
final byte [ ] data = new byte [ 7 ] ; data [ 0 ] = ( byte ) oindex ; data [ 1 ] = ( byte ) id ; data [ 2 ] = ( byte ) pindex ; data [ 3 ] = ( byte ) ( write ? 0x80 : 0x00 ) ; data [ 3 ] |= pdt & 0x3f ; data [ 4 ] = ( byte ) ( maxElems >> 8 ) ; data [ 5 ] = ( byte ) maxElems ; data [ 6 ] = ( byte ) ( rLevel << 4 | ( wLevel & 0x0f ) ) ; return data ;
public class DCModuleGenerator { /** * Populate an element tree with elements for a module . * @ param module the module to populate from . * @ param element the root element to attach child elements to . */ @ Override public final void generate ( final Module module , final Element element ) { } }
final DCModule dcModule = ( DCModule ) module ; final String title = dcModule . getTitle ( ) ; if ( title != null ) { element . addContent ( generateSimpleElementList ( "title" , dcModule . getTitles ( ) ) ) ; } final String creator = dcModule . getCreator ( ) ; if ( creator != null ) { element . addContent ( generateSimpleElementList ( "creator" , dcModule . getCreators ( ) ) ) ; } final List < DCSubject > subjects = dcModule . getSubjects ( ) ; for ( final DCSubject dcSubject : subjects ) { element . addContent ( generateSubjectElement ( dcSubject ) ) ; } final String description = dcModule . getDescription ( ) ; if ( description != null ) { element . addContent ( generateSimpleElementList ( "description" , dcModule . getDescriptions ( ) ) ) ; } final String publisher = dcModule . getPublisher ( ) ; if ( publisher != null ) { element . addContent ( generateSimpleElementList ( "publisher" , dcModule . getPublishers ( ) ) ) ; } final List < String > contributors = dcModule . getContributors ( ) ; if ( contributors != null ) { element . addContent ( generateSimpleElementList ( "contributor" , contributors ) ) ; } final Date dcDate = dcModule . getDate ( ) ; if ( dcDate != null ) { for ( final Date date : dcModule . getDates ( ) ) { element . addContent ( generateSimpleElement ( "date" , DateParser . formatW3CDateTime ( date , Locale . US ) ) ) ; } } final String type = dcModule . getType ( ) ; if ( type != null ) { element . addContent ( generateSimpleElementList ( "type" , dcModule . getTypes ( ) ) ) ; } final String format = dcModule . getFormat ( ) ; if ( format != null ) { element . addContent ( generateSimpleElementList ( "format" , dcModule . getFormats ( ) ) ) ; } final String identifier = dcModule . getIdentifier ( ) ; if ( identifier != null ) { element . addContent ( generateSimpleElementList ( "identifier" , dcModule . getIdentifiers ( ) ) ) ; } final String source = dcModule . getSource ( ) ; if ( source != null ) { element . 
addContent ( generateSimpleElementList ( "source" , dcModule . getSources ( ) ) ) ; } final String language = dcModule . getLanguage ( ) ; if ( language != null ) { element . addContent ( generateSimpleElementList ( "language" , dcModule . getLanguages ( ) ) ) ; } final String relation = dcModule . getRelation ( ) ; if ( relation != null ) { element . addContent ( generateSimpleElementList ( "relation" , dcModule . getRelations ( ) ) ) ; } final String coverage = dcModule . getCoverage ( ) ; if ( coverage != null ) { element . addContent ( generateSimpleElementList ( "coverage" , dcModule . getCoverages ( ) ) ) ; } final String rights = dcModule . getRights ( ) ; if ( rights != null ) { element . addContent ( generateSimpleElementList ( "rights" , dcModule . getRightsList ( ) ) ) ; }
public class MarkLogicRepositoryConnection { /** * add triples via URL * sets base URI to url if none is supplied * @ param url * @ param baseURI * @ param dataFormat * @ param contexts * @ throws IOException * @ throws RDFParseException * @ throws RepositoryException */ @ Override public void add ( URL url , String baseURI , RDFFormat dataFormat , Resource ... contexts ) throws IOException , RDFParseException , RepositoryException { } }
if ( notNull ( baseURI ) ) { getClient ( ) . sendAdd ( new URL ( url . toString ( ) ) . openStream ( ) , baseURI , dataFormat , contexts ) ; } else { getClient ( ) . sendAdd ( new URL ( url . toString ( ) ) . openStream ( ) , url . toString ( ) , dataFormat , contexts ) ; }
public class CorePlugin { /** * Implements the { @ link de . is24 . util . monitoring . InApplicationMonitor } side of the Visitor pattern . * Iterates through all registered { @ link de . is24 . util . monitoring . Reportable } instances and calls * the corresponding method on the { @ link de . is24 . util . monitoring . ReportVisitor } implementation . * @ param reportVisitor The { @ link de . is24 . util . monitoring . ReportVisitor } instance that shall be visited * by all regieteres { @ link de . is24 . util . monitoring . Reportable } instances . */ public void reportInto ( ReportVisitor reportVisitor ) { } }
counters . accept ( reportVisitor ) ; timers . accept ( reportVisitor ) ; stateValues . accept ( reportVisitor ) ; multiValues . accept ( reportVisitor ) ; versions . accept ( reportVisitor ) ; historizableLists . accept ( reportVisitor ) ;
public class ObjectManager { /** * Factory method to crate a new transaction for use with the ObjectManager . * @ return Transaction the new transaction . * @ throws ObjectManagerException */ public final Transaction getTransaction ( ) throws ObjectManagerException { } }
if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , "getTransaction" ) ; // If the log is full introduce a delay for a checkpoiunt before allowing the // application to proceed . objectManagerState . transactionPacing ( ) ; Transaction transaction = objectManagerState . getTransaction ( ) ; if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , "getTransaction" , "returns transaction=" + transaction + "(Transaction)" ) ; return transaction ;
public class ExcelReader { /** * 读取工作簿中指定的Sheet * @ param startRowIndex 起始行 ( 包含 , 从0开始计数 ) * @ param endRowIndex 结束行 ( 包含 , 从0开始计数 ) * @ return 行的集合 , 一行使用List表示 */ @ SuppressWarnings ( { } }
"rawtypes" , "unchecked" } ) public List < List < Object > > read ( int startRowIndex , int endRowIndex ) { checkNotClosed ( ) ; List < List < Object > > resultList = new ArrayList < > ( ) ; startRowIndex = Math . max ( startRowIndex , this . sheet . getFirstRowNum ( ) ) ; // 读取起始行 ( 包含 ) endRowIndex = Math . min ( endRowIndex , this . sheet . getLastRowNum ( ) ) ; // 读取结束行 ( 包含 ) boolean isFirstLine = true ; List rowList ; for ( int i = startRowIndex ; i <= endRowIndex ; i ++ ) { rowList = readRow ( i ) ; if ( CollUtil . isNotEmpty ( rowList ) || false == ignoreEmptyRow ) { if ( null == rowList ) { rowList = new ArrayList < > ( 0 ) ; } if ( isFirstLine ) { isFirstLine = false ; if ( MapUtil . isNotEmpty ( this . headerAlias ) ) { rowList = aliasHeader ( rowList ) ; } } resultList . add ( rowList ) ; } } return resultList ;
public class ListCertificatesRequest { /** * Filter the certificate list by status value . * @ param certificateStatuses * Filter the certificate list by status value . * @ see CertificateStatus */ public void setCertificateStatuses ( java . util . Collection < String > certificateStatuses ) { } }
if ( certificateStatuses == null ) { this . certificateStatuses = null ; return ; } this . certificateStatuses = new java . util . ArrayList < String > ( certificateStatuses ) ;
public class Metric { /** * Use { @ link # getTagsMap ( ) } instead . */ @ java . lang . Deprecated public java . util . Map < java . lang . String , java . lang . String > getTags ( ) { } }
return getTagsMap ( ) ;
public class TargetStreamControl { /** * / * ( non - Javadoc ) * @ see com . ibm . ws . sib . processor . runtime . SIMPDeliveryStreamReceiverControllable # getQueuedMessageByID */ public SIMPReceivedMessageControllable getReceivedMessageByID ( String id ) { } }
SIMPReceivedMessageControllable returnMessage = null ; SIMPIterator iterator = getReceivedMessageIterator ( SIMPConstants . SIMPCONTROL_RETURN_ALL_MESSAGES ) ; while ( iterator . hasNext ( ) ) { SIMPReceivedMessageControllable receivedMessage = ( SIMPReceivedMessageControllable ) iterator . next ( ) ; String msgID = receivedMessage . getId ( ) ; if ( msgID . equals ( id ) ) { returnMessage = receivedMessage ; break ; } } return returnMessage ;
public class SessionApi { /** * Get business attribute hierarchy * Get the business attribute hierarchy for the specified business attribute . * @ param id The unique ID of the business attribute . ( required ) * @ return ApiSuccessResponse * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiSuccessResponse getBusinessAttributeHierarchy ( Integer id ) throws ApiException { } }
ApiResponse < ApiSuccessResponse > resp = getBusinessAttributeHierarchyWithHttpInfo ( id ) ; return resp . getData ( ) ;
public class UCharacterName { /** * Gets the group index for the codepoint , or the group before it . * @ param codepoint The codepoint index . * @ return group index containing codepoint or the group before it . */ public int getGroup ( int codepoint ) { } }
int endGroup = m_groupcount_ ; int msb = getCodepointMSB ( codepoint ) ; int result = 0 ; // binary search for the group of names that contains the one for // code // find the group that contains codepoint , or the highest before it while ( result < endGroup - 1 ) { int gindex = ( result + endGroup ) >> 1 ; if ( msb < getGroupMSB ( gindex ) ) { endGroup = gindex ; } else { result = gindex ; } } return result ;
public class DateTimeUtils { /** * Add / Subtract the specified amount of hours to the given { @ link Calendar } . * The returned { @ link Calendar } has its fields synced . * @ param origin * @ param value * @ return * @ since 0.9.2 */ public static Calendar addHours ( Calendar origin , int value ) { } }
Calendar cal = sync ( ( Calendar ) origin . clone ( ) ) ; cal . add ( Calendar . HOUR_OF_DAY , value ) ; return sync ( cal ) ;
public class TemplateList { /** * Get the head of the most likely list of associations to check , based on * the name and type of the targetNode argument . * @ param xctxt The XPath runtime context . * @ param targetNode The target node that will be checked for a match . * @ param dtm The dtm owner for the target node . * @ return The head of a linked list that contains all possible match pattern to * template associations . */ public TemplateSubPatternAssociation getHead ( XPathContext xctxt , int targetNode , DTM dtm ) { } }
short targetNodeType = dtm . getNodeType ( targetNode ) ; TemplateSubPatternAssociation head ; switch ( targetNodeType ) { case DTM . ELEMENT_NODE : case DTM . ATTRIBUTE_NODE : head = ( TemplateSubPatternAssociation ) m_patternTable . get ( dtm . getLocalName ( targetNode ) ) ; break ; case DTM . TEXT_NODE : case DTM . CDATA_SECTION_NODE : head = m_textPatterns ; break ; case DTM . ENTITY_REFERENCE_NODE : case DTM . ENTITY_NODE : head = ( TemplateSubPatternAssociation ) m_patternTable . get ( dtm . getNodeName ( targetNode ) ) ; // % REVIEW % I think this is right break ; case DTM . PROCESSING_INSTRUCTION_NODE : head = ( TemplateSubPatternAssociation ) m_patternTable . get ( dtm . getLocalName ( targetNode ) ) ; break ; case DTM . COMMENT_NODE : head = m_commentPatterns ; break ; case DTM . DOCUMENT_NODE : case DTM . DOCUMENT_FRAGMENT_NODE : head = m_docPatterns ; break ; case DTM . NOTATION_NODE : default : head = ( TemplateSubPatternAssociation ) m_patternTable . get ( dtm . getNodeName ( targetNode ) ) ; // % REVIEW % I think this is right } return ( null == head ) ? m_wildCardPatterns : head ;
public class InternalSimpleAntlrParser {

    /**
     * ANTLR-generated rule method for NotExpression.
     *
     * Grammar (InternalSimpleAntlr.g:1086:1):
     *   ruleNotExpression returns [EObject current = null]
     *     : this_PrimaryExpression_0 = rulePrimaryExpression
     *     | ( () otherlv_2 = '!' ( (lv_value_3_0 = ruleNotExpression) ) )
     *
     * NOTE(review): generated backtracking parser code — the exact ordering of
     * state.backtracking / state.failed checks is load-bearing; do not restructure.
     *
     * @return the EObject built for this rule, or null when no model element was created
     * @throws RecognitionException on an unrecoverable syntax error
     */
    public final EObject ruleNotExpression() throws RecognitionException {
        EObject current = null;
        Token otherlv_2 = null;
        EObject this_PrimaryExpression_0 = null;
        EObject lv_value_3_0 = null;

        enterRule();
        try {
            {
                // Decide between the two alternatives by one token of lookahead:
                // an identifier or '(' starts a PrimaryExpression; token 33 is '!'.
                int alt21 = 2;
                int LA21_0 = input.LA(1);
                if ((LA21_0 == RULE_ID || LA21_0 == RULE_OPEN)) {
                    alt21 = 1;
                } else if ((LA21_0 == 33)) {
                    alt21 = 2;
                } else {
                    // While backtracking, record failure instead of throwing.
                    if (state.backtracking > 0) {
                        state.failed = true;
                        return current;
                    }
                    NoViableAltException nvae = new NoViableAltException("", 21, 0, input);
                    throw nvae;
                }
                switch (alt21) {
                    case 1:
                        // Alternative 1: delegate entirely to PrimaryExpression.
                        {
                            if (state.backtracking == 0) {
                            }
                            if (state.backtracking == 0) {
                                newCompositeNode(grammarAccess.getNotExpressionAccess().getPrimaryExpressionParserRuleCall_0());
                            }
                            pushFollow(FOLLOW_2);
                            this_PrimaryExpression_0 = rulePrimaryExpression();
                            state._fsp--;
                            if (state.failed) return current;
                            if (state.backtracking == 0) {
                                current = this_PrimaryExpression_0;
                                afterParserOrEnumRuleCall();
                            }
                        }
                        break;
                    case 2:
                        // Alternative 2: '!' NotExpression — build a NotExpression node
                        // and assign the recursive result to its "value" feature.
                        {
                            {
                                {
                                    if (state.backtracking == 0) {
                                    }
                                    if (state.backtracking == 0) {
                                        current = forceCreateModelElement(grammarAccess.getNotExpressionAccess().getNotExpressionAction_1_0(), current);
                                    }
                                }
                                otherlv_2 = (Token) match(input, 33, FOLLOW_20);
                                if (state.failed) return current;
                                if (state.backtracking == 0) {
                                    newLeafNode(otherlv_2, grammarAccess.getNotExpressionAccess().getExclamationMarkKeyword_1_1());
                                }
                                {
                                    {
                                        if (state.backtracking == 0) {
                                            newCompositeNode(grammarAccess.getNotExpressionAccess().getValueNotExpressionParserRuleCall_1_2_0());
                                        }
                                        pushFollow(FOLLOW_2);
                                        lv_value_3_0 = ruleNotExpression();
                                        state._fsp--;
                                        if (state.failed) return current;
                                        if (state.backtracking == 0) {
                                            if (current == null) {
                                                current = createModelElementForParent(grammarAccess.getNotExpressionRule());
                                            }
                                            set(current, "value", lv_value_3_0, "org.eclipse.xtext.generator.parser.antlr.debug.SimpleAntlr.NotExpression");
                                            afterParserOrEnumRuleCall();
                                        }
                                    }
                                }
                            }
                        }
                        break;
                }
            }
            if (state.backtracking == 0) {
                leaveRule();
            }
        } catch (RecognitionException re) {
            recover(input, re);
            appendSkippedTokens();
        } finally {
        }
        return current;
    }
}
public class appfwpolicy_appfwpolicylabel_binding { /** * Use this API to fetch appfwpolicy _ appfwpolicylabel _ binding resources of given name . */ public static appfwpolicy_appfwpolicylabel_binding [ ] get ( nitro_service service , String name ) throws Exception { } }
appfwpolicy_appfwpolicylabel_binding obj = new appfwpolicy_appfwpolicylabel_binding ( ) ; obj . set_name ( name ) ; appfwpolicy_appfwpolicylabel_binding response [ ] = ( appfwpolicy_appfwpolicylabel_binding [ ] ) obj . get_resources ( service ) ; return response ;
public class WxaAPI { /** * < strong > 图片检查 < / strong > < br > * 校验一张图片是否含有违法违规内容 。 < br > * 应用场景举例 : < br > * 1 ) 图片智能鉴黄 : 涉及拍照的工具类应用 ( 如美拍 , 识图类应用 ) 用户拍照上传检测 ; 电商类商品上架图片检测 ; 媒体类用户文章里的图片检测等 ; < br > * 2 ) 敏感人脸识别 : 用户头像 ; 媒体类用户文章里的图片检测 ; 社交类用户上传的图片检测等 < br > * < br > * 频率限制 : 单个 appId 调用上限为 1000 次 / 分钟 , 100,000 次 / 天 * @ since 2.8.20 * @ param access _ token access _ token * @ param media 要检测的图片文件 , 格式支持PNG 、 JPEG 、 JPG 、 GIF , 图片尺寸不超过 750px * 1334px * @ return result */ public static BaseResult img_sec_check ( String access_token , File media ) { } }
HttpPost httpPost = new HttpPost ( BASE_URI + "/wxa/img_sec_check" ) ; FileBody bin = new FileBody ( media ) ; HttpEntity reqEntity = MultipartEntityBuilder . create ( ) . addPart ( "media" , bin ) . addTextBody ( PARAM_ACCESS_TOKEN , API . accessToken ( access_token ) ) . build ( ) ; httpPost . setEntity ( reqEntity ) ; return LocalHttpClient . executeJsonResult ( httpPost , BaseResult . class ) ;
public class ThrowableFormatCommand { /** * Set the log data . * @ see FormatCommandInterface # execute ( String , String , long , Level , Object , * Throwable ) */ public String execute ( String clientID , String name , long time , Level level , Object message , Throwable throwable ) { } }
StringBuilder sb = new StringBuilder ( ) ; if ( throwable != null ) { sb . append ( throwable . toString ( ) ) ; String newline = System . getProperty ( "line.separator" ) ; StackTraceElement [ ] stackTrace = throwable . getStackTrace ( ) ; for ( int i = 0 ; i < stackTrace . length ; i ++ ) { StackTraceElement element = stackTrace [ i ] ; sb . append ( newline ) ; sb . append ( "\tat " ) ; sb . append ( element . toString ( ) ) ; } } return sb . toString ( ) ;
public class Model { /** * Returns parent of this model , assuming that this table represents a child . * This method may return < code > null < / code > in cases when you have orphan record and * referential integrity is not enforced in DBMS with a foreign key constraint . * @ param parentClass class of a parent model . * @ return instance of a parent of this instance in the " belongs to " relationship if found , ot null if not found . */ public < P extends Model > P parent ( Class < P > parentClass ) { } }
return parent ( parentClass , false ) ;
public class SchemaTypeAdapter { /** * Constructs { @ link Schema . Type # UNION UNION } type schema from the json input . * @ param reader The { @ link JsonReader } for streaming json input tokens . * @ param knownRecords Set of record name already encountered during the reading . * @ return A { @ link Schema } of type { @ link Schema . Type # UNION UNION } . * @ throws java . io . IOException When fails to construct a valid schema from the input . */ private Schema readUnion ( JsonReader reader , Set < String > knownRecords ) throws IOException { } }
ImmutableList . Builder < Schema > unionSchemas = ImmutableList . builder ( ) ; reader . beginArray ( ) ; while ( reader . peek ( ) != JsonToken . END_ARRAY ) { unionSchemas . add ( read ( reader , knownRecords ) ) ; } reader . endArray ( ) ; return Schema . unionOf ( unionSchemas . build ( ) ) ;
public class StreamletUtils { /** * Selects a random item from a list . Used in many example source streamlets . */ public static < T > T randomFromList ( List < T > ls ) { } }
return ls . get ( new Random ( ) . nextInt ( ls . size ( ) ) ) ;
public class DefaultPrincipalElectionStrategy { /** * Gets principal attributes for principal . * @ param principal the principal * @ param principalAttributes the principal attributes * @ return the principal attributes for principal */ protected Map < String , List < Object > > getPrincipalAttributesForPrincipal ( final Principal principal , final Map < String , List < Object > > principalAttributes ) { } }
return principalAttributes ;
public class DefuzzifierFactory { /** * Creates a Defuzzifier by executing the registered constructor * @ param key is the unique name by which constructors are registered * @ param resolution is the resolution of an IntegralDefuzzifier * @ return a Defuzzifier by executing the registered constructor and setting * its resolution */ public Defuzzifier constructDefuzzifier ( String key , int resolution ) { } }
Defuzzifier result = constructObject ( key ) ; if ( result instanceof IntegralDefuzzifier ) { ( ( IntegralDefuzzifier ) result ) . setResolution ( resolution ) ; } return result ;
public class PravegaTablesStoreHelper { /** * We dont want to do indefinite retries because for controller ' s graceful shutting down , it waits on grpc service to * be terminated which in turn waits on all outstanding grpc calls to complete . And the store may stall the calls if * there is indefinite retries . Restricting it to 12 retries gives us ~ 60 seconds worth of wait on the upper side . * Also , note that the call can fail because hostContainerMap has not been updated or it can fail because it cannot * talk to segment store . Both these are translated to ConnectionErrors and are retried . All other exceptions * are thrown back */ private < T > CompletableFuture < T > withRetries ( Supplier < CompletableFuture < T > > futureSupplier , Supplier < String > errorMessage ) { } }
return RetryHelper . withRetriesAsync ( exceptionalCallback ( futureSupplier , errorMessage ) , e -> { Throwable unwrap = Exceptions . unwrap ( e ) ; return unwrap instanceof StoreException . StoreConnectionException ; } , NUM_OF_RETRIES , executor ) ;
public class Flowable { /** * Maps the upstream items into { @ link CompletableSource } s and subscribes to them one after the * other completes . * < img width = " 640 " height = " 305 " src = " https : / / raw . github . com / wiki / ReactiveX / RxJava / images / rx - operators / concatMap . png " alt = " " > * < dl > * < dt > < b > Backpressure : < / b > < / dt > * < dd > The operator expects the upstream to support backpressure . If this { @ code Flowable } violates the rule , the operator will * signal a { @ code MissingBackpressureException } . < / dd > * < dt > < b > Scheduler : < / b > < / dt > * < dd > { @ code concatMapCompletable } does not operate by default on a particular { @ link Scheduler } . < / dd > * < / dl > * < p > History : 2.1.11 - experimental * @ param mapper the function called with the upstream item and should return * a { @ code CompletableSource } to become the next source to * be subscribed to * @ param prefetch The number of upstream items to prefetch so that fresh items are * ready to be mapped when a previous { @ code CompletableSource } terminates . * The operator replenishes after half of the prefetch amount has been consumed * and turned into { @ code CompletableSource } s . * @ return a new Completable instance * @ see # concatMapCompletableDelayError ( Function , boolean , int ) * @ since 2.2 */ @ CheckReturnValue @ SchedulerSupport ( SchedulerSupport . NONE ) @ BackpressureSupport ( BackpressureKind . FULL ) public final Completable concatMapCompletable ( Function < ? super T , ? extends CompletableSource > mapper , int prefetch ) { } }
ObjectHelper . requireNonNull ( mapper , "mapper is null" ) ; ObjectHelper . verifyPositive ( prefetch , "prefetch" ) ; return RxJavaPlugins . onAssembly ( new FlowableConcatMapCompletable < T > ( this , mapper , ErrorMode . IMMEDIATE , prefetch ) ) ;
public class Rss2Parser { /** * Parses the media content of the entry * @ param tag The tag which to handle . * @ param article Article object to assign the node value to . */ private void handleMediaContent ( String tag , Article article ) { } }
String url = xmlParser . getAttributeValue ( null , "url" ) ; if ( url == null ) { throw new IllegalArgumentException ( "Url argument must not be null" ) ; } Article . MediaContent mc = new Article . MediaContent ( ) ; article . addMediaContent ( mc ) ; mc . setUrl ( url ) ; if ( xmlParser . getAttributeValue ( null , "type" ) != null ) { mc . setType ( xmlParser . getAttributeValue ( null , "type" ) ) ; } if ( xmlParser . getAttributeValue ( null , "fileSize" ) != null ) { mc . setFileSize ( Integer . parseInt ( xmlParser . getAttributeValue ( null , "fileSize" ) ) ) ; } if ( xmlParser . getAttributeValue ( null , "medium" ) != null ) { mc . setMedium ( xmlParser . getAttributeValue ( null , "medium" ) ) ; } if ( xmlParser . getAttributeValue ( null , "isDefault" ) != null ) { mc . setIsDefault ( Boolean . parseBoolean ( xmlParser . getAttributeValue ( null , "isDefault" ) ) ) ; } if ( xmlParser . getAttributeValue ( null , "expression" ) != null ) { mc . setExpression ( xmlParser . getAttributeValue ( null , "expression" ) ) ; } if ( xmlParser . getAttributeValue ( null , "bitrate" ) != null ) { mc . setBitrate ( Integer . parseInt ( xmlParser . getAttributeValue ( null , "bitrate" ) ) ) ; } if ( xmlParser . getAttributeValue ( null , "framerate" ) != null ) { mc . setFramerate ( Integer . parseInt ( xmlParser . getAttributeValue ( null , "framerate" ) ) ) ; } if ( xmlParser . getAttributeValue ( null , "samplingrate" ) != null ) { mc . setSamplingrate ( Integer . parseInt ( xmlParser . getAttributeValue ( null , "samplingrate" ) ) ) ; } if ( xmlParser . getAttributeValue ( null , "channels" ) != null ) { mc . setChannels ( Integer . parseInt ( xmlParser . getAttributeValue ( null , "channels" ) ) ) ; } if ( xmlParser . getAttributeValue ( null , "duration" ) != null ) { mc . setDuration ( Integer . parseInt ( xmlParser . getAttributeValue ( null , "duration" ) ) ) ; } if ( xmlParser . getAttributeValue ( null , "height" ) != null ) { mc . setHeight ( Integer . 
parseInt ( xmlParser . getAttributeValue ( null , "height" ) ) ) ; } if ( xmlParser . getAttributeValue ( null , "width" ) != null ) { mc . setWidth ( Integer . parseInt ( xmlParser . getAttributeValue ( null , "width" ) ) ) ; } if ( xmlParser . getAttributeValue ( null , "lang" ) != null ) { mc . setLang ( xmlParser . getAttributeValue ( null , "lang" ) ) ; }
public class BaseMessageManager { /** * Add this message filter to the appropriate queue . * The message filter contains the queue name and type and a listener to send the message to . * @ param messageFilter The message filter to add . * @ return An error code . */ public int addMessageFilter ( MessageFilter messageFilter ) { } }
MessageReceiver receiver = this . getMessageQueue ( messageFilter . getQueueName ( ) , messageFilter . getQueueType ( ) ) . getMessageReceiver ( ) ; receiver . addMessageFilter ( messageFilter ) ; return Constant . NORMAL_RETURN ;
public class SequenceFileSource { /** * { @ inheritDoc } */ @ Override protected FileBasedSource < KV < K , V > > createForSubrangeOfFile ( Metadata fileMetadata , long start , long end ) { } }
LOG . debug ( "Creating source for subrange: " + start + "-" + end ) ; return new SequenceFileSource < > ( fileMetadata , start , end , keyClass , keySerializationClass , valueClass , valueSerializationClass , getMinBundleSize ( ) , coder ) ;
public class ElementMatchers { /** * Matches a field in its defined shape . * @ param matcher The matcher to apply to the matched field ' s defined shape . * @ param < T > The matched object ' s type . * @ return A matcher that matches a matched field ' s defined shape . */ public static < T extends FieldDescription > ElementMatcher . Junction < T > definedField ( ElementMatcher < ? super FieldDescription . InDefinedShape > matcher ) { } }
return new DefinedShapeMatcher < T , FieldDescription . InDefinedShape > ( matcher ) ;
public class TimeZone { /** * Return a new String array containing all system TimeZone IDs * with the given raw offset from GMT . These IDs may be passed to * < code > get ( ) < / code > to construct the corresponding TimeZone * object . * @ param rawOffset the offset in milliseconds from GMT * @ return an array of IDs for system TimeZones with the given * raw offset . If there are none , return a zero - length array . * @ see # getAvailableIDs ( SystemTimeZoneType , String , Integer ) */ public static String [ ] getAvailableIDs ( int rawOffset ) { } }
Set < String > ids = getAvailableIDs ( SystemTimeZoneType . ANY , null , Integer . valueOf ( rawOffset ) ) ; return ids . toArray ( new String [ 0 ] ) ;
public class Seq { /** * This is equivalent to : * < pre > * < code > * if ( isEmpty ( ) ) { * return Nullable . empty ( ) ; * final Iterator < T > iter = iterator ( ) ; * T result = iter . next ( ) ; * while ( iter . hasNext ( ) ) { * result = accumulator . apply ( result , iter . next ( ) ) ; * return Nullable . of ( result ) ; * < / code > * < / pre > * @ param accumulator * @ return */ public < E extends Exception > Nullable < T > reduce ( Try . BinaryOperator < T , E > accumulator ) throws E { } }
N . checkArgNotNull ( accumulator ) ; if ( isEmpty ( ) ) { return Nullable . empty ( ) ; } final Iterator < T > iter = iterator ( ) ; T result = iter . next ( ) ; while ( iter . hasNext ( ) ) { result = accumulator . apply ( result , iter . next ( ) ) ; } return Nullable . of ( result ) ;
public class FeatureGenerators { /** * See { @ link # productFeatureGenerator ( Iterable ) } . * @ param generators * @ return */ public static < A , B > FeatureGenerator < A , List < B > > productFeatureGenerator ( FeatureGenerator < A , B > ... generators ) { } }
return FeatureGenerators . productFeatureGenerator ( Arrays . asList ( generators ) ) ;
public class RequestCreator {

    /**
     * An error drawable to be used if the request image could not be loaded.
     *
     * @param errorResId drawable resource id shown on load failure; must not be 0
     * @return this creator, for call chaining
     * @throws IllegalArgumentException if {@code errorResId} is 0
     * @throws IllegalStateException if an error drawable instance was already set
     */
    @NonNull
    public RequestCreator error(@DrawableRes int errorResId) {
        if (errorResId == 0) {
            throw new IllegalArgumentException("Error image resource invalid.");
        }
        // A Drawable instance and a resource id are mutually exclusive.
        if (errorDrawable != null) {
            throw new IllegalStateException("Error image already set.");
        }
        this.errorResId = errorResId;
        return this;
    }
}
public class GroupCombineOperator {

    /**
     * Translates this API-level group-combine operator into its runtime dataflow
     * representation, distinguishing non-grouped input, key-selector-function
     * keys (with or without a secondary sort) and expression/position keys.
     */
    @Override
    protected GroupCombineOperatorBase<?, OUT, ?> translateToDataFlow(Operator<IN> input) {
        String name = getName() != null ? getName() : "GroupCombine at " + defaultName;
        // distinguish between grouped reduce and non-grouped reduce
        if (grouper == null) {
            // non grouped reduce
            UnaryOperatorInformation<IN, OUT> operatorInfo = new UnaryOperatorInformation<>(getInputType(), getResultType());
            GroupCombineOperatorBase<IN, OUT, GroupCombineFunction<IN, OUT>> po =
                    new GroupCombineOperatorBase<>(function, operatorInfo, new int[0], name);
            po.setInput(input);
            // the parallelism for a non grouped reduce can only be 1
            po.setParallelism(1);
            return po;
        }
        if (grouper.getKeys() instanceof SelectorFunctionKeys) {
            // Keys are extracted by a user function: the plan must wrap/unwrap tuples.
            @SuppressWarnings("unchecked")
            SelectorFunctionKeys<IN, ?> selectorKeys = (SelectorFunctionKeys<IN, ?>) grouper.getKeys();
            if (grouper instanceof SortedGrouping) {
                // Grouping with a secondary sort order inside each group.
                SortedGrouping<IN> sortedGrouping = (SortedGrouping<IN>) grouper;
                SelectorFunctionKeys<IN, ?> sortKeys = sortedGrouping.getSortSelectionFunctionKey();
                Ordering groupOrder = sortedGrouping.getGroupOrdering();
                PlanUnwrappingSortedGroupCombineOperator<IN, OUT, ?, ?> po =
                        translateSelectorFunctionSortedReducer(selectorKeys, sortKeys, groupOrder, function, getResultType(), name, input);
                po.setParallelism(this.getParallelism());
                return po;
            } else {
                PlanUnwrappingGroupCombineOperator<IN, OUT, ?> po =
                        translateSelectorFunctionReducer(selectorKeys, function, getResultType(), name, input);
                po.setParallelism(this.getParallelism());
                return po;
            }
        } else if (grouper.getKeys() instanceof Keys.ExpressionKeys) {
            // Position/expression keys map directly onto logical key positions.
            int[] logicalKeyPositions = grouper.getKeys().computeLogicalKeyPositions();
            UnaryOperatorInformation<IN, OUT> operatorInfo = new UnaryOperatorInformation<>(getInputType(), getResultType());
            GroupCombineOperatorBase<IN, OUT, GroupCombineFunction<IN, OUT>> po =
                    new GroupCombineOperatorBase<>(function, operatorInfo, logicalKeyPositions, name);
            po.setInput(input);
            po.setParallelism(getParallelism());
            // set group order
            if (grouper instanceof SortedGrouping) {
                SortedGrouping<IN> sortedGrouper = (SortedGrouping<IN>) grouper;
                int[] sortKeyPositions = sortedGrouper.getGroupSortKeyPositions();
                Order[] sortOrders = sortedGrouper.getGroupSortOrders();
                Ordering o = new Ordering();
                for (int i = 0; i < sortKeyPositions.length; i++) {
                    o.appendOrdering(sortKeyPositions[i], null, sortOrders[i]);
                }
                po.setGroupOrder(o);
            }
            return po;
        } else {
            throw new UnsupportedOperationException("Unrecognized key type.");
        }
    }
}
public class StorageAccountsInner { /** * Gets the SAS token associated with the specified Data Lake Analytics and Azure Storage account and container combination . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; SasTokenInformationInner & gt ; object */ public Observable < Page < SasTokenInformationInner > > listSasTokensNextAsync ( final String nextPageLink ) { } }
return listSasTokensNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < SasTokenInformationInner > > , Page < SasTokenInformationInner > > ( ) { @ Override public Page < SasTokenInformationInner > call ( ServiceResponse < Page < SasTokenInformationInner > > response ) { return response . body ( ) ; } } ) ;
public class RegisteredResources {

    /**
     * Delist the specified resource from the transaction.
     *
     * @param xaRes the XAResource to delist
     * @param flag the XA flags to pass to the resource:
     *             TMSUSPEND, TMFAIL or TMSUCCESS flag to xa_end
     * @return true if the resource was found and xa_end completed (or its failure
     *         was absorbed); false if the resource is not known to this transaction
     * @throws SystemException if the resource is not successfully disassociated
     *         from the transaction branch
     */
    protected boolean delistResource(XAResource xaRes, int flag) throws SystemException {
        if (tc.isEntryEnabled())
            Tr.entry(tc, "delistResource", new Object[] { xaRes, Util.printFlag(flag) });
        // get resource manager instance
        JTAResourceBase jtaRes = (JTAResourceBase) getResourceTable().get(xaRes);
        // Fall back to the one-phase resource, which is tracked separately from the table.
        if (jtaRes == null && _onePhaseResourceEnlisted != null) {
            if (_onePhaseResourceEnlisted.XAResource().equals(xaRes))
                jtaRes = _onePhaseResourceEnlisted;
        }
        if (jtaRes == null) {
            // Unknown resource: log and report failure without raising an exception.
            Tr.error(tc, "WTRN0065_XARESOURCE_NOT_KNOWN", xaRes);
            if (tc.isEntryEnabled())
                Tr.exit(tc, "delistResource", Boolean.FALSE);
            return false;
        }
        // try to end transaction association using specified flag.
        try {
            jtaRes.end(flag);
        } catch (XAException xae) {
            _errorCode = xae.errorCode; // Save locally for FFDC
            FFDCFilter.processException(xae, "com.ibm.tx.jta.impl.RegisteredResources.delistResource", "711", this);
            if (tc.isDebugEnabled())
                Tr.debug(tc, "XAException: error code " + XAReturnCodeHelper.convertXACode(_errorCode), xae);
            Throwable toThrow = null;
            if (_errorCode >= XAException.XA_RBBASE && _errorCode <= XAException.XA_RBEND) {
                // Rollback-only codes: no action needed beyond marking the transaction below.
                if (tc.isEventEnabled())
                    Tr.event(tc, "Transaction branch has been marked rollback-only by the RM");
            } else if (_errorCode == XAException.XAER_RMFAIL) {
                if (tc.isEventEnabled())
                    Tr.event(tc, "RM has failed");
                // Resource has rolled back
                jtaRes.setResourceStatus(StatefulResource.ROLLEDBACK);
                jtaRes.destroy();
            } else // XAER_RMERR, XAER_INVAL, XAER_PROTO, XAER_NOTA
            {
                // Protocol-level failures surface to the caller as a SystemException.
                Tr.error(tc, "WTRN0079_END_FAILED", new Object[] { XAReturnCodeHelper.convertXACode(_errorCode), xae });
                toThrow = new SystemException("XAResource end association error:" + XAReturnCodeHelper.convertXACode(_errorCode)).initCause(xae);
            }
            // Mark transaction as rollback only.
            try {
                _transaction.setRollbackOnly();
                if (tc.isEventEnabled())
                    Tr.event(tc, "Transaction marked as rollback only.");
            } catch (IllegalStateException e) {
                FFDCFilter.processException(e, "com.ibm.tx.jta.impl.RegisteredResources.delistResource", "742", this);
                toThrow = new SystemException(e.getLocalizedMessage()).initCause(e);
            }
            if (toThrow != null) {
                if (tc.isEntryEnabled())
                    Tr.exit(tc, "delistResource", toThrow);
                throw (SystemException) toThrow;
            }
        }
        if (tc.isEntryEnabled())
            Tr.exit(tc, "delistResource", Boolean.TRUE);
        return true;
    }
}
public class CPInstanceUtil {

    /**
     * Returns the cp instance where CPDefinitionId = &#63; and sku = &#63; or
     * throws a {@link NoSuchCPInstanceException} if it could not be found.
     *
     * @param CPDefinitionId the cp definition ID
     * @param sku the sku
     * @return the matching cp instance
     * @throws NoSuchCPInstanceException if a matching cp instance could not be found
     */
    public static CPInstance findByC_S(long CPDefinitionId, String sku)
        throws com.liferay.commerce.product.exception.NoSuchCPInstanceException {
        // Static utility facade: delegate straight to the persistence implementation.
        return getPersistence().findByC_S(CPDefinitionId, sku);
    }
}
public class WebhooksInner {

    /**
     * Updates a webhook with the specified parameters.
     *
     * @param resourceGroupName the name of the resource group to which the container registry belongs
     * @param registryName the name of the container registry
     * @param webhookName the name of the webhook
     * @param webhookUpdateParameters the parameters for updating a webhook
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the WebhookInner object if successful
     */
    public WebhookInner beginUpdate(String resourceGroupName, String registryName, String webhookName, WebhookUpdateParameters webhookUpdateParameters) {
        // Synchronous wrapper: block on the async variant and unwrap the response body.
        return beginUpdateWithServiceResponseAsync(resourceGroupName, registryName, webhookName, webhookUpdateParameters).toBlocking().single().body();
    }
}
public class ReflectionUtils { /** * Get wrapper type of a primitive type . * @ param primitiveType to get its wrapper type * @ return the wrapper type of the given primitive type */ public Class < ? > getWrapperType ( Class < ? > primitiveType ) { } }
for ( PrimitiveEnum p : PrimitiveEnum . values ( ) ) { if ( p . getType ( ) . equals ( primitiveType ) ) { return p . getClazz ( ) ; } } return primitiveType ; // if not primitive , return it as is
public class HttpFields { public boolean containsKey ( String name ) { } }
FieldInfo info = getFieldInfo ( name ) ; return getField ( info , true ) != null ;
public class ElasticsearchRestClientFactoryBean { /** * Classpath root for index and mapping files ( default : / es ) * < p > Example : < / p > * < pre > * { @ code * < property name = " classpathRoot " value = " / es " / > * < / pre > * That means that the factory will look in es folder to find index and mappings settings . * < br > So if you want to define a mapping for the _ doc type in the twitter index , you * should put a _ doc . json file under / es / twitter / folder . * @ param classpathRoot Classpath root for index and mapping files * @ see # setMappings ( String [ ] ) */ public void setClasspathRoot ( String classpathRoot ) { } }
// For compatibility reasons , we need to convert " / classpathroot " to " classpathroot " if ( classpathRoot . startsWith ( "/" ) ) { this . classpathRoot = classpathRoot . substring ( 1 , classpathRoot . length ( ) ) ; } else { this . classpathRoot = classpathRoot ; }
public class BaseCrawler {

    /**
     * Copies all the Selenium cookies for the current domain to the HTTP client
     * cookie store, so direct HTTP requests share the browser's session state.
     */
    private void syncHttpClientCookies() {
        // Convert each WebDriver cookie to the HTTP client representation and register it.
        webDriver.manage().getCookies().stream().map(CookieConverter::convertToHttpClientCookie).forEach(cookieStore::addCookie);
    }
}
public class StandardQueryFactory {

    /**
     * Returns a new or cached query for the given query specification.
     *
     * @param filter optional filter object, defaults to open filter if null
     * @param values optional values object, defaults to filter initial values
     * @param ordering optional order-by properties
     */
    public Query<S> query(Filter<S> filter, FilterValues<S> values, OrderingList<S> ordering) throws FetchException {
        // Delegate to the four-argument overload with no value for the last parameter.
        // NOTE(review): the meaning of the fourth argument is not visible in this
        // file - confirm against the full overload's declaration.
        return query(filter, values, ordering, null);
    }
}
public class OvhSmsSender {

    /**
     * Handle OVH response. If the status provided in the response body is less
     * than 200, the message has been sent; otherwise it has not.
     *
     * @param message the SMS that was sent
     * @param response the received response from the OVH API
     * @throws IOException when the response couldn't be read
     * @throws JsonProcessingException when the response format is not valid JSON
     * @throws MessageNotSentException when OVH reports (or HTTP status implies)
     *         that the message couldn't be sent
     */
    private void handleResponse(Sms message, Response response) throws IOException, JsonProcessingException, MessageNotSentException {
        if (response.getStatus().isSuccess()) {
            // HTTP call succeeded; OVH encodes the real outcome in the JSON body.
            JsonNode json = mapper.readTree(response.getBody());
            int ovhStatus = json.get("status").asInt();
            // 100 <= ovh status < 200 ====> OK -> just log response
            // 200 <= ovh status       ====> KO -> throw an exception
            if (ovhStatus >= OK_STATUS) {
                LOG.error("SMS failed to be sent through OVH");
                LOG.debug("Sent SMS: {}", message);
                LOG.debug("Response status {}", response.getStatus());
                LOG.debug("Response body {}", response.getBody());
                throw new MessageNotSentException("SMS couldn't be sent through OVH: " + json.get("message").asText(), message);
            } else {
                LOG.info("SMS successfully sent through OVH");
                LOG.debug("Sent SMS: {}", message);
                LOG.debug("Response: {}", response.getBody());
            }
        } else {
            // Transport-level failure: report the HTTP status itself.
            LOG.error("Response status {}", response.getStatus());
            LOG.error("Response body {}", response.getBody());
            throw new MessageNotSentException("SMS couldn't be sent. Response status is " + response.getStatus(), message);
        }
    }
}
public class JSONWorldDataHelper {

    /**
     * Builds the player position data to be used as observation signals by the
     * listener: absolute world coordinates plus view angles.
     *
     * @param json a JSON object into which the positional information will be added
     * @param player the player whose current state is sampled
     */
    public static void buildPositionStats(JsonObject json, EntityPlayerMP player) {
        json.addProperty("XPos", player.posX);
        json.addProperty("YPos", player.posY);
        json.addProperty("ZPos", player.posZ);
        // View direction as reported by the entity's rotation fields.
        json.addProperty("Pitch", player.rotationPitch);
        json.addProperty("Yaw", player.rotationYaw);
    }
}
public class SnappyServer {

    /**
     * Define a REST endpoint mapped to HTTP PUT.
     *
     * @param url the relative URL to map this endpoint to
     * @param endpoint the endpoint handler
     * @param mediaTypes (optional) the accepted and returned types for this endpoint
     */
    public static void put(String url, HttpConsumer<HttpExchange> endpoint, MediaTypes... mediaTypes) {
        // Register the handler in the shared routing table under the PUT method.
        addResource(Methods.PUT, url, endpoint, mediaTypes);
    }
}
public class CKMSQuantiles { /** * Specifies the allowable error for this rank , depending on which quantiles * are being targeted . * This is the f ( r _ i , n ) function from the CKMS paper . It ' s basically how * wide the range of this rank can be . * @ param rank * the index in the list of samples */ private double allowableError ( int rank ) { } }
// NOTE : according to CKMS , this should be count , not size , but this // leads // to error larger than the error bounds . Leaving it like this is // essentially a HACK , and blows up memory , but does " work " . // int size = count ; int size = sample . size ( ) ; double minError = size + 1 ; for ( Quantile q : quantiles ) { double error ; if ( rank <= q . quantile * size ) { error = q . u * ( size - rank ) ; } else { error = q . v * rank ; } if ( error < minError ) { minError = error ; } } return minError ;
public class CallableUtils { /** * Returns a composed function that first applies the Callable and then applies * { @ linkplain BiFunction } { @ code after } to the result . * @ param < T > return type of callable * @ param < R > return type of handler * @ param handler the function applied after callable * @ return a function composed of supplier and handler */ public static < T , R > Callable < R > andThen ( Callable < T > callable , BiFunction < T , Exception , R > handler ) { } }
return ( ) -> { try { T result = callable . call ( ) ; return handler . apply ( result , null ) ; } catch ( Exception exception ) { return handler . apply ( null , exception ) ; } } ;
public class ViewSet { /** * ( this is for backwards compatibility ) */ @ JsonSetter ( "enterpriseContextViews" ) void setEnterpriseContextViews ( Collection < SystemLandscapeView > enterpriseContextViews ) { } }
if ( enterpriseContextViews != null ) { this . systemLandscapeViews = new HashSet < > ( enterpriseContextViews ) ; }
public class druidGParser {

    /**
     * druidG.g:332:1: getEquals returns [EqualsToHolder holder] :
     * (a= ID ( WS )? EQUALS ( WS )? b= ( SINGLE_QUOTE_STRING | FLOAT | LONG ) );
     *
     * Generated ANTLR rule: parses "identifier [ws] = [ws] literal" and returns
     * the pair as an EqualsToHolder. On a recognition error the standard
     * report/recover path runs and null is returned.
     */
    public final EqualsToHolder getEquals() throws RecognitionException {
        EqualsToHolder holder = null;
        Token a = null;
        Token b = null;
        try {
            // druidG.g:333:2: ( (a= ID ( WS )? EQUALS ( WS )? b= ( SINGLE_QUOTE_STRING | FLOAT | LONG ) ) )
            // druidG.g:333:4: (a= ID ( WS )? EQUALS ( WS )? b= ( SINGLE_QUOTE_STRING | FLOAT | LONG ) )
            {
                // druidG.g:333:4: (a= ID ( WS )? EQUALS ( WS )? b= ( SINGLE_QUOTE_STRING | FLOAT | LONG ) )
                // druidG.g:333:5: a= ID ( WS )? EQUALS ( WS )? b= ( SINGLE_QUOTE_STRING | FLOAT | LONG )
                {
                    a = (Token) match(input, ID, FOLLOW_ID_in_getEquals2187);
                    // druidG.g:333:10: ( WS )? -- optional whitespace before '='
                    int alt140 = 2;
                    int LA140_0 = input.LA(1);
                    if ((LA140_0 == WS)) {
                        alt140 = 1;
                    }
                    switch (alt140) {
                        case 1:
                            // druidG.g:333:10: WS
                        {
                            match(input, WS, FOLLOW_WS_in_getEquals2189);
                        }
                        break;
                    }
                    match(input, EQUALS, FOLLOW_EQUALS_in_getEquals2192);
                    // druidG.g:333:21: ( WS )? -- optional whitespace after '='
                    int alt141 = 2;
                    int LA141_0 = input.LA(1);
                    if ((LA141_0 == WS)) {
                        alt141 = 1;
                    }
                    switch (alt141) {
                        case 1:
                            // druidG.g:333:21: WS
                        {
                            match(input, WS, FOLLOW_WS_in_getEquals2194);
                        }
                        break;
                    }
                    // The right-hand side may be a quoted string, float or long literal.
                    b = input.LT(1);
                    if (input.LA(1) == FLOAT || input.LA(1) == LONG || input.LA(1) == SINGLE_QUOTE_STRING) {
                        input.consume();
                        state.errorRecovery = false;
                    } else {
                        MismatchedSetException mse = new MismatchedSetException(null, input);
                        throw mse;
                    }
                }
                // Package the matched token texts (null-safe) into the result holder.
                holder = new EqualsToHolder((a != null ? a.getText() : null), (b != null ? b.getText() : null));
            }
        } catch (RecognitionException re) {
            reportError(re);
            recover(input, re);
        } finally {
            // do for sure before leaving
        }
        return holder;
    }
}
public class AWSSimpleSystemsManagementClient { /** * Delete a custom inventory type , or the data associated with a custom Inventory type . Deleting a custom inventory * type is also referred to as deleting a custom inventory schema . * @ param deleteInventoryRequest * @ return Result of the DeleteInventory operation returned by the service . * @ throws InternalServerErrorException * An error occurred on the server side . * @ throws InvalidTypeNameException * The parameter type name is not valid . * @ throws InvalidOptionException * The delete inventory option specified is not valid . Verify the option and try again . * @ throws InvalidDeleteInventoryParametersException * One or more of the parameters specified for the delete operation is not valid . Verify all parameters and * try again . * @ throws InvalidInventoryRequestException * The request is not valid . * @ sample AWSSimpleSystemsManagement . DeleteInventory * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ssm - 2014-11-06 / DeleteInventory " target = " _ top " > AWS API * Documentation < / a > */ @ Override public DeleteInventoryResult deleteInventory ( DeleteInventoryRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteInventory ( request ) ;
public class CmsSearchManager { /** * Sets the update frequency of the offline indexer in milliseconds . < p > * @ param offlineUpdateFrequency the update frequency in milliseconds to set */ public void setOfflineUpdateFrequency ( String offlineUpdateFrequency ) { } }
try { setOfflineUpdateFrequency ( Long . parseLong ( offlineUpdateFrequency ) ) ; } catch ( Exception e ) { LOG . error ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_PARSE_OFFLINE_UPDATE_FAILED_2 , offlineUpdateFrequency , new Long ( DEFAULT_OFFLINE_UPDATE_FREQNENCY ) ) , e ) ; setOfflineUpdateFrequency ( DEFAULT_OFFLINE_UPDATE_FREQNENCY ) ; }
public class PerformanceProfiler { /** * Returns the duration of the measured tasks in ms */ public double getDurationMs ( ) { } }
double durationMs = 0 ; for ( Duration duration : durations ) { if ( duration . taskFinished ( ) ) { durationMs += duration . getDurationMS ( ) ; } } return durationMs ;
public class Widgets { /** * Create and return a widget using the given factory and the given options . */ protected < W extends Widget > W widget ( Element e , WidgetFactory < W > factory , WidgetInitializer < W > initializer ) { } }
if ( ! isWidgetCreationAuthorizedFrom ( e ) ) { return null ; } W widget = factory . create ( e ) ; if ( initializer != null ) { initializer . initialize ( widget , e ) ; } return widget ;
public class VpTree {

    /**
     * Builds the tree from a set of points by recursively partitioning them
     * according to a random pivot (the vantage point).
     *
     * @param lower start of range
     * @param upper end of range (exclusive)
     * @return root of the (sub)tree, or null if lower == upper
     */
    private Node buildFromPoints(int lower, int upper) {
        if (upper == lower) {
            return null;
        }
        final Node node = new Node();
        node.index = lower;
        // A single-element range becomes a leaf with no threshold/children.
        if (upper - lower > 1) {
            // choose an arbitrary vantage point and move it to the start
            // NOTE(review): nextInt(upper - lower - 1) never selects the last
            // element of the range as pivot - presumably intentional; confirm.
            int i = random.nextInt(upper - lower - 1) + lower;
            listSwap(items, lower, i);
            listSwap(indexes, lower, i);
            int median = (upper + lower + 1) / 2;
            // partition around the median distance
            // TODO: use the QuickSelect class?
            nthElement(lower + 1, upper, median, items[lower]);
            // what was the median?
            node.threshold = distance(items[lower], items[median]);
            node.index = lower;
            // Left child holds points closer than the threshold, right child the rest.
            node.left = buildFromPoints(lower + 1, median);
            node.right = buildFromPoints(median, upper);
        }
        return node;
    }
}
public class BeanMap { /** * Convenience method for getting an iterator over the entries . * @ return an iterator over the entries */ public Iterator < Entry < String , Object > > entryIterator ( ) { } }
final Iterator < String > iter = keyIterator ( ) ; return new Iterator < Entry < String , Object > > ( ) { @ Override public boolean hasNext ( ) { return iter . hasNext ( ) ; } @ Override public Entry < String , Object > next ( ) { String key = iter . next ( ) ; Object value = get ( key ) ; return new MyMapEntry ( BeanMap . this , key , value ) ; } @ Override public void remove ( ) { throw new UnsupportedOperationException ( "remove() not supported for BeanMap" ) ; } } ;
public class BeamToCDK { /** * Insert the vertex ' v ' into sorted position in the array ' vs ' . * @ param v a vertex ( int id ) * @ param vs array of vertices ( int ids ) * @ return array with ' u ' inserted in sorted order */ private static int [ ] insert ( int v , int [ ] vs ) { } }
final int n = vs . length ; final int [ ] ws = Arrays . copyOf ( vs , n + 1 ) ; ws [ n ] = v ; // insert ' u ' in to sorted position for ( int i = n ; i > 0 && ws [ i ] < ws [ i - 1 ] ; i -- ) { int tmp = ws [ i ] ; ws [ i ] = ws [ i - 1 ] ; ws [ i - 1 ] = tmp ; } return ws ;
public class FunctionArgumentSignatureFactory { /** * Create a new factory . */ public List < FunctionArgumentSignature > createDefaultArgumentSignature ( Parameter parameter ) { } }
List < FunctionArgumentSignature > list = new LinkedList < > ( ) ; String name = getParameterName ( parameter ) ; Object defaultValue = getDefaultValue ( parameter ) ; list . add ( new FunctionArgumentSignature ( name , defaultValue ) ) ; return list ;
public class JsDocInfoParser { /** * Parses a string containing a JsDoc type declaration , returning the * type if the parsing succeeded or { @ code null } if it failed . */ public static Node parseTypeString ( String typeString ) { } }
JsDocInfoParser parser = getParser ( typeString ) ; return parser . parseTopLevelTypeExpression ( parser . next ( ) ) ;
public class BatchOperation { /** * Method to add the query batch operation to batchItemRequest * @ param query the query * @ param bId the batch Id */ public void addQuery ( String query , String bId ) { } }
BatchItemRequest batchItemRequest = new BatchItemRequest ( ) ; batchItemRequest . setBId ( bId ) ; batchItemRequest . setQuery ( query ) ; batchItemRequests . add ( batchItemRequest ) ; bIds . add ( bId ) ;
public class ObjectIdentifier { /** * Gets full authority for a URL by appending port to the url authority . * @ param uri the URL to get the full authority for . * @ return the full authority . */ protected String getFullAuthority ( URI uri ) { } }
String authority = uri . getAuthority ( ) ; if ( ! authority . contains ( ":" ) && uri . getPort ( ) > 0 ) { // Append port for complete authority authority = String . format ( "%s:%d" , uri . getAuthority ( ) , uri . getPort ( ) ) ; } return authority ;
public class AnalyticFormulas {

    /**
     * Calculates the Black-Scholes option value of a call, i.e., the payoff
     * max(S(T)-K,0) P, where S follows a log-normal process with constant
     * log-volatility. The model quantities are random variables, so the method
     * may calculate a per-path valuation in a single call. The result is
     * payoffUnit * (F * N(d+) - K * N(d-)).
     *
     * @param forward The forward of the underlying.
     * @param volatility The Black-Scholes volatility.
     * @param optionMaturity The option maturity T.
     * @param optionStrike The option strike. If the option strike is &le; 0.0
     *        the method returns the value of the forward contract paying S(T)-K in T.
     * @param payoffUnit The payoff unit (e.g., the discount factor)
     * @return Returns the value of a European call option under the Black-Scholes model.
     */
    public static RandomVariableInterface blackScholesGeneralizedOptionValue(RandomVariableInterface forward, RandomVariableInterface volatility, double optionMaturity, double optionStrike, RandomVariableInterface payoffUnit) {
        if (optionMaturity < 0) {
            // Negative maturity: option has expired, value is zero (shape-preserving).
            return forward.mult(0.0);
        } else {
            // d+/- = (log(F/K) +/- sigma^2 T / 2) / (sigma sqrt(T))
            RandomVariableInterface dPlus = forward.div(optionStrike).log().add(volatility.squared().mult(0.5 * optionMaturity)).div(volatility).div(Math.sqrt(optionMaturity));
            RandomVariableInterface dMinus = dPlus.sub(volatility.mult(Math.sqrt(optionMaturity)));
            // Standard normal CDF, applied path-wise to the random variables.
            UnivariateFunction cumulativeNormal = new UnivariateFunction() {
                public double value(double x) {
                    return NormalDistribution.cumulativeDistribution(x);
                }
            };
            // Black formula: payoffUnit * (F * N(d+) - K * N(d-)).
            RandomVariableInterface valueAnalytic = dPlus.apply(cumulativeNormal).mult(forward).sub(dMinus.apply(cumulativeNormal).mult(optionStrike)).mult(payoffUnit);
            return valueAnalytic;
        }
    }
}
public class IndicesResource {

    /**
     * Lists all open, closed and reopened indices of the given index set.
     *
     * @param indexSetId the index set to inspect
     * @return an aggregate of the closed, reopened and open index collections
     */
    @GET
    @Timed
    @Path("/{indexSetId}/list")
    @ApiOperation(value = "List all open, closed and reopened indices.")
    @Produces(MediaType.APPLICATION_JSON)
    public AllIndices indexSetList(@ApiParam(name = "indexSetId") @PathParam("indexSetId") String indexSetId) {
        // Aggregate the three index states into a single response object.
        return AllIndices.create(this.indexSetClosed(indexSetId), this.indexSetReopened(indexSetId), this.indexSetOpen(indexSetId));
    }
}
public class XLinkUtils { /** * Returns a Calendarobject to a given dateString in the Format ' DATEFORMAT ' . * If the given String is of the wrong format , null is returned . * @ see XLinkUtils # DATEFORMAT */ public static Calendar dateStringToCalendar ( String dateString ) { } }
Calendar calendar = Calendar . getInstance ( ) ; SimpleDateFormat formatter = new SimpleDateFormat ( XLinkConstants . DATEFORMAT ) ; try { calendar . setTime ( formatter . parse ( dateString ) ) ; } catch ( Exception ex ) { return null ; } return calendar ;
public class PHS398CoverPageSupplementBaseGenerator { /** * This method splits the passed explanation comprising cell line * information , puts into a list and returns the list . * @ param explanation * String of cell lines * @ return { @ link List } */ protected List < String > getCellLines ( String explanation ) { } }
int startPos = 0 ; List < String > cellLines = new ArrayList < > ( ) ; for ( int commaPos = 0 ; commaPos > - 1 ; ) { commaPos = explanation . indexOf ( "," , startPos ) ; if ( commaPos >= 0 ) { String cellLine = ( explanation . substring ( startPos , commaPos ) . trim ( ) ) ; explanation = explanation . substring ( commaPos + 1 ) ; if ( cellLine . length ( ) > 0 ) { cellLines . add ( cellLine ) ; } } else if ( explanation . length ( ) > 0 ) { cellLines . add ( explanation . trim ( ) ) ; } } return cellLines ;
public class CmsADEConfigData { /** * Gets the main detail page for a specific type . < p > * @ param type the type name * @ return the main detail page for that type */ public CmsDetailPageInfo getMainDetailPage ( String type ) { } }
List < CmsDetailPageInfo > detailPages = getDetailPagesForType ( type ) ; if ( ( detailPages == null ) || detailPages . isEmpty ( ) ) { return null ; } return detailPages . get ( 0 ) ;
public class PoolUtil { /** * Helper method * @ param o items to print * @ return String for safe printing . */ protected static String safePrint ( Object ... o ) { } }
StringBuilder sb = new StringBuilder ( ) ; for ( Object obj : o ) { sb . append ( obj != null ? obj : "null" ) ; } return sb . toString ( ) ;
public class MultiUserChat { /** * Joins the chat room using the specified nickname . If already joined * using another nickname , this method will first leave the room and then * re - join using the new nickname . The default connection timeout for a reply * from the group chat server that the join succeeded will be used . After * joining the room , the room will decide the amount of history to send . * @ param nickname the nickname to use . * @ throws NoResponseException * @ throws XMPPErrorException if an error occurs joining the room . In particular , a * 401 error can occur if no password was provided and one is required ; or a * 403 error can occur if the user is banned ; or a * 404 error can occur if the room does not exist or is locked ; or a * 407 error can occur if user is not on the member list ; or a * 409 error can occur if someone is already in the group chat with the same nickname . * @ throws NoResponseException if there was no response from the server . * @ throws NotConnectedException * @ throws InterruptedException * @ throws NotAMucServiceException */ public void join ( Resourcepart nickname ) throws NoResponseException , XMPPErrorException , NotConnectedException , InterruptedException , NotAMucServiceException { } }
MucEnterConfiguration . Builder builder = getEnterConfigurationBuilder ( nickname ) ; join ( builder . build ( ) ) ;
public class LongStream { /** * Creates a { @ code LongStream } by iterative application { @ code LongUnaryOperator } function * to an initial element { @ code seed } . Produces { @ code LongStream } consisting of * { @ code seed } , { @ code f ( seed ) } , { @ code f ( f ( seed ) ) } , etc . * < p > The first element ( position { @ code 0 } ) in the { @ code LongStream } will be * the provided { @ code seed } . For { @ code n > 0 } , the element at position * { @ code n } , will be the result of applying the function { @ code f } to the * element at position { @ code n - 1 } . * < p > Example : * < pre > * seed : 1 * f : ( a ) - & gt ; a + 5 * result : [ 1 , 6 , 11 , 16 , . . . ] * < / pre > * @ param seed the initial element * @ param f a function to be applied to the previous element to produce a new element * @ return a new sequential { @ code LongStream } * @ throws NullPointerException if { @ code f } is null */ @ NotNull public static LongStream iterate ( final long seed , @ NotNull final LongUnaryOperator f ) { } }
Objects . requireNonNull ( f ) ; return new LongStream ( new LongIterate ( seed , f ) ) ;
public class DeleteFollowRequest { /** * check a delete follow edge request for validity concerning NSSP * @ param request * Tomcat servlet request * @ param deleteRequest * basic delete request object * @ param deleteFollowResponse * delete follow edge response object * @ return delete follow edge request object < br > * < b > null < / b > if the delete follow edge request is invalid */ public static DeleteFollowRequest checkRequest ( final HttpServletRequest request , final DeleteRequest deleteRequest , final DeleteFollowResponse deleteFollowResponse ) { } }
final Node user = checkUserIdentifier ( request , deleteFollowResponse ) ; if ( user != null ) { final Node followed = checkFollowedIdentifier ( request , deleteFollowResponse ) ; if ( followed != null ) { return new DeleteFollowRequest ( deleteRequest . getType ( ) , user , followed ) ; } } return null ;
public class CBADao {
    /**
     * Distribute account CBA (credit) across all COMMITTED unpaid invoices.
     *
     * Invoices are processed in invoice-date order (oldest first); credit is applied
     * invoice by invoice until either all credit is consumed or no unpaid invoice remains.
     *
     * @param accountCBA the total credit currently available on the account
     * @param invoicesTags tags attached to the account's invoices (used for filtering)
     * @param entitySqlDaoWrapperFactory the DAO factory for the surrounding transaction
     * @param context the internal call context
     * @throws InvoiceApiException if applying credit to an invoice fails
     * @throws EntityPersistenceException if persisting a CBA item fails
     */
    private void useExistingCBAFromTransaction(final BigDecimal accountCBA, final List<Tag> invoicesTags, final EntitySqlDaoWrapperFactory entitySqlDaoWrapperFactory, final InternalCallContext context) throws InvoiceApiException, EntityPersistenceException {
        // Nothing to distribute when the account has no positive credit.
        if (accountCBA.compareTo(BigDecimal.ZERO) <= 0) {
            return;
        }
        // PERF: Computing the invoice balance is difficult to do in the DB, so we effectively need to retrieve
        // all invoices on the account and filter the unpaid ones in memory.
        // This should be infrequent though because of the account CBA check above.
        final List<InvoiceModelDao> allInvoices = invoiceDaoHelper.getAllInvoicesByAccountFromTransaction(false, invoicesTags, entitySqlDaoWrapperFactory, context);
        final List<InvoiceModelDao> unpaidInvoices = invoiceDaoHelper.getUnpaidInvoicesByAccountFromTransaction(allInvoices, null);
        // We order the same as BillingStateCalculator -- should really share the comparator
        final List<InvoiceModelDao> orderedUnpaidInvoices = Ordering.from(new Comparator<InvoiceModelDao>() {
            @Override
            public int compare(final InvoiceModelDao i1, final InvoiceModelDao i2) {
                return i1.getInvoiceDate().compareTo(i2.getInvoiceDate());
            }
        }).immutableSortedCopy(unpaidInvoices);
        BigDecimal remainingAccountCBA = accountCBA;
        for (final InvoiceModelDao unpaidInvoice : orderedUnpaidInvoices) {
            // Apply credit to this invoice; the helper returns the credit still left over.
            remainingAccountCBA = computeCBAComplexityAndCreateCBAItem(remainingAccountCBA, unpaidInvoice, entitySqlDaoWrapperFactory, context);
            if (remainingAccountCBA.compareTo(BigDecimal.ZERO) <= 0) {
                break;
            }
        }
    }
}
public class HeaderCell {
    /**
     * Implementation of the {@link IBehaviorConsumer} interface that extends the functionality of this
     * tag beyond that exposed via the JSP tag attributes. This method accepts the following facets:
     * <table>
     * <tr><td>Facet Name</td><td>Operation</td></tr>
     * <tr><td><code>renderer</code></td><td>Extends the cell decorator chain used to implement rendering for this tag</td></tr>
     * </table>
     * The <code>renderer</code> facet accepts the following behavior names. The value of each should be a String classname
     * of a class that extends the {@link CellDecorator} base class.
     * <table>
     * <tr><td>extends</td><td>Add an additional decorator to the currently configured cell decorator chain.</td></tr>
     * <tr><td>sort</td><td>Replace the currently configured sort decorator with one created from this class name.</td></tr>
     * </table>
     *
     * @param name the name of the behavior
     * @param value the value of the behavior
     * @param facet the behavior's facet
     * @throws JspException when the behavior's facet is not recognized
     */
    public void setBehavior(String name, Object value, String facet) throws JspException {
        // Only the "renderer" facet is supported; anything else is an error.
        if (facet != null && facet.equals(BEHAVIOR_RENDERER_NAME)) {
            // The behavior value is the classname of the decorator to instantiate.
            String className = value != null ? value.toString() : null;
            /* provides a way to extend the existing decorators */
            CellDecorator cellDecorator = (CellDecorator) ExtensionUtil.instantiateClass(className, CellDecorator.class);
            if (name.equals(BEHAVIOR_RENDERER_NAME_EXTENDS)) {
                // Chain the new decorator in front of the currently configured one.
                cellDecorator.setNestedDecorator(getCellDecorator());
            } else if (name.equals(BEHAVIOR_RENDERER_NAME_SORT)) {
                // Replace the sort decorator while keeping the standard header-sorted nesting.
                cellDecorator.setNestedDecorator(DECORATOR_HEADER_SORTED);
            }
            /* replace the core cell decorator with a new default */
            else if (name.equals(BEHAVIOR_RENDERER_NAME_DEFAULT)) {
                /* nyi */
            }
            // NOTE(review): for an unrecognized name the new decorator is still installed
            // below without a nested decorator — confirm this fall-through is intended.
            _cellDecorator = cellDecorator;
        } else {
            // Unknown (or null) facet: report via the localized message bundle.
            String s = Bundle.getString("Tags_BehaviorFacetNotSupported", new Object[]{facet});
            throw new JspException(s);
        }
    }
}
public class AreaStyle { /** * Compares two styles and decides if it is the same style . The thresholds of the style are taken from the { @ link Config } . * @ param other the other area to be compared * @ return < code > true < / code > if the areas are considered to have the same style */ public boolean isSameStyle ( AreaStyle other ) { } }
double fsdif = Math . abs ( getAverageFontSize ( ) - other . getAverageFontSize ( ) ) ; double wdif = Math . abs ( getAverageFontWeight ( ) - other . getAverageFontWeight ( ) ) ; double sdif = Math . abs ( getAverageFontStyle ( ) - other . getAverageFontStyle ( ) ) ; double ldif = Math . abs ( getAverageColorLuminosity ( ) - other . getAverageColorLuminosity ( ) ) ; Color bg1 = getBackgroundColor ( ) ; Color bg2 = other . getBackgroundColor ( ) ; return fsdif <= Config . FONT_SIZE_THRESHOLD && wdif <= Config . FONT_WEIGHT_THRESHOLD && sdif <= Config . FONT_STYLE_THRESHOLD && ldif <= Config . TEXT_LUMINOSITY_THRESHOLD && ( ( bg1 == null && bg2 == null ) || ( bg1 != null && bg2 != null && bg1 . equals ( bg2 ) ) ) ;
public class WMultiDropdownRenderer { /** * Paints the given WMultiDropdown . * @ param component the WMultiDropdown to paint . * @ param renderContext the RenderContext to paint to . */ @ Override public void doRender ( final WComponent component , final WebXmlRenderContext renderContext ) { } }
WMultiDropdown dropdown = ( WMultiDropdown ) component ; XmlStringBuilder xml = renderContext . getWriter ( ) ; String dataKey = dropdown . getListCacheKey ( ) ; boolean readOnly = dropdown . isReadOnly ( ) ; xml . appendTagOpen ( "ui:multidropdown" ) ; xml . appendAttribute ( "id" , component . getId ( ) ) ; xml . appendOptionalAttribute ( "class" , component . getHtmlClass ( ) ) ; xml . appendOptionalAttribute ( "track" , component . isTracking ( ) , "true" ) ; xml . appendOptionalAttribute ( "hidden" , dropdown . isHidden ( ) , "true" ) ; if ( readOnly ) { xml . appendAttribute ( "readOnly" , "true" ) ; } else { xml . appendOptionalAttribute ( "data" , dataKey != null && ! readOnly , dataKey ) ; xml . appendOptionalAttribute ( "disabled" , dropdown . isDisabled ( ) , "true" ) ; xml . appendOptionalAttribute ( "required" , dropdown . isMandatory ( ) , "true" ) ; xml . appendOptionalAttribute ( "submitOnChange" , dropdown . isSubmitOnChange ( ) , "true" ) ; xml . appendOptionalAttribute ( "toolTip" , component . getToolTip ( ) ) ; xml . appendOptionalAttribute ( "accessibleText" , component . getAccessibleText ( ) ) ; int min = dropdown . getMinSelect ( ) ; int max = dropdown . getMaxSelect ( ) ; xml . appendOptionalAttribute ( "min" , min > 0 , min ) ; xml . appendOptionalAttribute ( "max" , max > 0 , max ) ; xml . appendOptionalAttribute ( "title" , I18nUtilities . format ( null , InternalMessages . DEFAULT_MULTIDROPDOWN_TIP ) ) ; } xml . appendClose ( ) ; // Options List < ? > options = dropdown . getOptions ( ) ; boolean renderSelectionsOnly = dropdown . isReadOnly ( ) || dataKey != null ; if ( options != null ) { int optionIndex = 0 ; List < ? > selections = dropdown . getSelected ( ) ; for ( Object option : options ) { if ( option instanceof OptionGroup ) { xml . appendTagOpen ( "ui:optgroup" ) ; xml . appendAttribute ( "label" , ( ( OptionGroup ) option ) . getDesc ( ) ) ; xml . appendClose ( ) ; for ( Object nestedOption : ( ( OptionGroup ) option ) . 
getOptions ( ) ) { renderOption ( dropdown , nestedOption , optionIndex ++ , xml , selections , renderSelectionsOnly ) ; } xml . appendEndTag ( "ui:optgroup" ) ; } else { renderOption ( dropdown , option , optionIndex ++ , xml , selections , renderSelectionsOnly ) ; } } } if ( ! readOnly ) { DiagnosticRenderUtil . renderDiagnostics ( dropdown , renderContext ) ; } // End tag xml . appendEndTag ( "ui:multidropdown" ) ;
public class HttpChannelConfig { /** * Parse the configuration data into the separate values . * @ param cc */ private void parseConfig ( ChannelData cc ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . entry ( tc , "parseConfig: " + cc . getName ( ) ) ; } Map < Object , Object > propsIn = cc . getPropertyBag ( ) ; Map < Object , Object > props = new HashMap < Object , Object > ( ) ; // convert all keys to valid case independent of case String key ; Object value ; // match keys independent of case . // So this is a bit ugly , but the parsing code is not state independent , meaning if it parses A then and // only then it will parse B , and we can ' t be certain that only properties that we know about from the HTTP Config // are in this Map ( so we can ' t just lower case everything ) . So , to be case independent we need to convert // the entries to their known internal string constants . We shouldn ' t need to configure the channel often , and there // should not be many custom properties , so performance should not be an issue . for ( Entry < Object , Object > entry : propsIn . entrySet ( ) ) { key = ( String ) entry . getKey ( ) ; value = entry . getValue ( ) ; // First comparisons are for ones exposed in metatype . xml if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_KEEPALIVE_ENABLED ) ) { props . put ( HttpConfigConstants . PROPNAME_KEEPALIVE_ENABLED , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_MAX_PERSIST ) ) { props . put ( HttpConfigConstants . PROPNAME_MAX_PERSIST , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_PERSIST_TIMEOUT ) ) { props . put ( HttpConfigConstants . PROPNAME_PERSIST_TIMEOUT , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_READ_TIMEOUT ) ) { props . put ( HttpConfigConstants . PROPNAME_READ_TIMEOUT , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_WRITE_TIMEOUT ) ) { props . put ( HttpConfigConstants . PROPNAME_WRITE_TIMEOUT , value ) ; continue ; } if ( key . 
equalsIgnoreCase ( HttpConfigConstants . PROPNAME_JIT_ONLY_READS ) ) { props . put ( HttpConfigConstants . PROPNAME_JIT_ONLY_READS , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_OUTGOING_VERSION ) ) { props . put ( HttpConfigConstants . PROPNAME_OUTGOING_VERSION , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_DIRECT_BUFF ) ) { props . put ( HttpConfigConstants . PROPNAME_DIRECT_BUFF , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_OUTGOING_HDR_BUFFSIZE ) ) { props . put ( HttpConfigConstants . PROPNAME_OUTGOING_HDR_BUFFSIZE , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_INCOMING_HDR_BUFFSIZE ) ) { props . put ( HttpConfigConstants . PROPNAME_INCOMING_HDR_BUFFSIZE , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_INCOMING_BODY_BUFFSIZE ) ) { props . put ( HttpConfigConstants . PROPNAME_INCOMING_BODY_BUFFSIZE , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_BYTE_CACHE_SIZE ) ) { props . put ( HttpConfigConstants . PROPNAME_BYTE_CACHE_SIZE , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_EXTRACT_VALUE ) ) { props . put ( HttpConfigConstants . PROPNAME_EXTRACT_VALUE , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_BINARY_TRANSPORT ) ) { props . put ( HttpConfigConstants . PROPNAME_BINARY_TRANSPORT , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_LIMIT_FIELDSIZE ) ) { props . put ( HttpConfigConstants . PROPNAME_LIMIT_FIELDSIZE , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_LIMIT_NUMHEADERS ) ) { props . put ( HttpConfigConstants . PROPNAME_LIMIT_NUMHEADERS , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_LIMIT_NUMBER_RESPONSES ) ) { props . 
put ( HttpConfigConstants . PROPNAME_LIMIT_NUMBER_RESPONSES , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_MSG_SIZE_LIMIT ) ) { props . put ( HttpConfigConstants . PROPNAME_MSG_SIZE_LIMIT , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_ALLOW_RETRIES ) ) { props . put ( HttpConfigConstants . PROPNAME_ALLOW_RETRIES , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_HEADER_VALIDATION ) ) { props . put ( HttpConfigConstants . PROPNAME_HEADER_VALIDATION , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_STRICT_URL_FORMAT ) ) { props . put ( HttpConfigConstants . PROPNAME_STRICT_URL_FORMAT , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_SERVER_HEADER_VALUE ) ) { props . put ( HttpConfigConstants . PROPNAME_SERVER_HEADER_VALUE , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_REMOVE_SERVER_HEADER ) ) { props . put ( HttpConfigConstants . PROPNAME_REMOVE_SERVER_HEADER , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_DATE_HEADER_RANGE ) ) { props . put ( HttpConfigConstants . PROPNAME_DATE_HEADER_RANGE , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_COOKIES_CONFIGURE_NOCACHE ) ) { props . put ( HttpConfigConstants . PROPNAME_COOKIES_CONFIGURE_NOCACHE , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_HEADER_CHANGE_LIMIT ) ) { props . put ( HttpConfigConstants . PROPNAME_HEADER_CHANGE_LIMIT , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_AUTODECOMPRESSION ) ) { props . put ( HttpConfigConstants . PROPNAME_AUTODECOMPRESSION , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_ENABLE_SMUGGLING_PROTECTION ) ) { props . put ( HttpConfigConstants . 
PROPNAME_ENABLE_SMUGGLING_PROTECTION , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_RUNNING_ON_ZOS ) ) { props . put ( HttpConfigConstants . PROPNAME_RUNNING_ON_ZOS , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_SERVANT_REGION ) ) { props . put ( HttpConfigConstants . PROPNAME_SERVANT_REGION , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_CONTROL_REGION ) ) { props . put ( HttpConfigConstants . PROPNAME_CONTROL_REGION , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_V0_COOKIE_RFC1123_COMPAT ) ) { props . put ( HttpConfigConstants . PROPNAME_V0_COOKIE_RFC1123_COMPAT , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_DO_NOT_ALLOW_DUPLICATE_SET_COOKIES ) ) { props . put ( HttpConfigConstants . PROPNAME_DO_NOT_ALLOW_DUPLICATE_SET_COOKIES , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_WAIT_FOR_END_OF_MESSAGE ) ) { props . put ( HttpConfigConstants . PROPNAME_WAIT_FOR_END_OF_MESSAGE , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . REMOVE_CLHEADER_IN_TEMP_STATUS_RFC7230_COMPAT ) ) { props . put ( HttpConfigConstants . REMOVE_CLHEADER_IN_TEMP_STATUS_RFC7230_COMPAT , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_PREVENT_RESPONSE_SPLIT ) ) { props . put ( HttpConfigConstants . PROPNAME_PREVENT_RESPONSE_SPLIT , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_PURGE_DATA_DURING_CLOSE ) ) { props . put ( HttpConfigConstants . PROPNAME_PURGE_DATA_DURING_CLOSE , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_THROW_IOE_FOR_INBOUND_CONNECTIONS ) ) { props . put ( HttpConfigConstants . PROPNAME_THROW_IOE_FOR_INBOUND_CONNECTIONS , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . 
PROPNAME_SKIP_PATH_QUOTE ) ) { props . put ( HttpConfigConstants . PROPNAME_SKIP_PATH_QUOTE , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_H2_CONN_CLOSE_TIMEOUT ) ) { props . put ( HttpConfigConstants . PROPNAME_H2_CONN_CLOSE_TIMEOUT , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_H2_CONN_READ_WINDOW_SIZE ) ) { props . put ( HttpConfigConstants . PROPNAME_H2_CONN_READ_WINDOW_SIZE , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_PURGE_REMAINING_RESPONSE ) ) { props . put ( HttpConfigConstants . PROPNAME_PURGE_REMAINING_RESPONSE , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_PROTOCOL_VERSION ) ) { props . put ( HttpConfigConstants . PROPNAME_PROTOCOL_VERSION , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_H2_CONNECTION_IDLE_TIMEOUT ) ) { props . put ( HttpConfigConstants . PROPNAME_H2_CONNECTION_IDLE_TIMEOUT , value ) ; continue ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_H2_MAX_CONCURRENT_STREAMS ) ) { props . put ( HttpConfigConstants . PROPNAME_H2_MAX_CONCURRENT_STREAMS , value ) ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_H2_MAX_FRAME_SIZE ) ) { props . put ( HttpConfigConstants . PROPNAME_H2_MAX_FRAME_SIZE , value ) ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_REMOTE_PROXIES ) ) { props . put ( HttpConfigConstants . PROPNAME_REMOTE_PROXIES , value ) ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_REMOTE_IP ) ) { props . put ( HttpConfigConstants . PROPNAME_REMOTE_IP , value ) ; } if ( key . equalsIgnoreCase ( HttpConfigConstants . PROPNAME_REMOTE_IP_ACCESS_LOG ) ) { props . put ( HttpConfigConstants . PROPNAME_REMOTE_IP_ACCESS_LOG , value ) ; } props . 
put ( key , value ) ; } parseProtocolVersion ( props ) ; parsePersistence ( props ) ; parseOutgoingVersion ( props ) ; parseBufferType ( props ) ; parseOutgoingBufferSize ( props ) ; parseIncomingHdrBufferSize ( props ) ; parseIncomingBodyBufferSize ( props ) ; parsePersistTimeout ( props ) ; parseReadTimeout ( props ) ; parseWriteTimeout ( props ) ; parseByteCacheSize ( props ) ; parseDelayedExtract ( props ) ; parseBinaryTransport ( props ) ; parseLimitFieldSize ( props ) ; parseLimitNumberHeaders ( props ) ; parseLimitNumberResponses ( props ) ; parseLimitMessageSize ( props ) ; parseAllowRetries ( props ) ; parseLoggingInfo ( props ) ; parseHeaderValidation ( props ) ; parseStrictURLFormat ( props ) ; parseServerHeader ( props ) ; parseDateHeaderRange ( props ) ; parseCookieUpdate ( props ) ; // PI75280 parseHeaderChangeLimit ( props ) ; parseAutoDecompression ( props ) ; parseRequestSmugglingProtection ( props ) ; parsev0CookieDateRFC1123compat ( props ) ; parseDoNotAllowDuplicateSetCookies ( props ) ; // PI31734 parseWaitForEndOfMessage ( props ) ; // PI33453 parseRemoveCLHeaderInTempStatusRespRFC7230compat ( props ) ; // PI35277 parsePreventResponseSplit ( props ) ; // PI45266 parseAttemptPurgeData ( props ) ; // PI11176 parseThrowIOEForInboundConnections ( props ) ; // PI57542 parseSkipCookiePathQuotes ( props ) ; // 738893 parseH2ConnCloseTimeout ( props ) ; parseH2ConnReadWindowSize ( props ) ; parsePurgeRemainingResponseBody ( props ) ; // PI81572 parseH2ConnectionIdleTimeout ( props ) ; parseH2MaxConcurrentStreams ( props ) ; parseH2MaxFrameSize ( props ) ; parseRemoteIp ( props ) ; parseRemoteIpProxies ( props ) ; parseRemoteIpAccessLog ( props ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . exit ( tc , "parseConfig" ) ; }
public class X509SubjectAlternativeNameUPNPrincipalResolver { /** * Get alt name seq . * @ param sanItem subject alternative name value encoded as a two elements List with elem ( 0 ) representing object id and elem ( 1) * representing object ( subject alternative name ) itself . * @ return ASN1Sequence abstraction representing subject alternative name or null if the passed in * List doesn ' t contain at least two elements . * as expected to be returned by implementation of { @ code X509Certificate . html # getSubjectAlternativeNames } * @ see < a href = " http : / / docs . oracle . com / javase / 7 / docs / api / java / security / cert / X509Certificate . html # getSubjectAlternativeNames ( ) " > * X509Certificate # getSubjectAlternativeNames < / a > */ private static ASN1Sequence getAltnameSequence ( final List sanItem ) { } }
// Should not be the case , but still , a extra " safety " check if ( sanItem . size ( ) < 2 ) { LOGGER . error ( "Subject Alternative Name List does not contain at least two required elements. Returning null principal id..." ) ; } val itemType = ( Integer ) sanItem . get ( 0 ) ; if ( itemType == 0 ) { val altName = ( byte [ ] ) sanItem . get ( 1 ) ; return getAltnameSequence ( altName ) ; } return null ;
public class SARLRuntime { /** * Returns the XML representation of the given SRE . * @ param sre the SRE to serialize . * @ return an XML representation of the given SRE . * @ throws CoreException if trying to compute the XML for the SRE state encounters a problem . */ public static String getSREAsXML ( ISREInstall sre ) throws CoreException { } }
try { final DocumentBuilderFactory factory = DocumentBuilderFactory . newInstance ( ) ; final DocumentBuilder builder = factory . newDocumentBuilder ( ) ; final Document xmldocument = builder . newDocument ( ) ; final Element sreNode = xmldocument . createElement ( "SRE" ) ; // $ NON - NLS - 1 $ sreNode . setAttribute ( "platform" , Boolean . toString ( isPlatformSRE ( sre ) ) ) ; // $ NON - NLS - 1 $ sreNode . setAttribute ( "id" , sre . getId ( ) ) ; // $ NON - NLS - 1 $ sreNode . setAttribute ( "class" , sre . getClass ( ) . getName ( ) ) ; // $ NON - NLS - 1 $ sre . getAsXML ( xmldocument , sreNode ) ; xmldocument . appendChild ( sreNode ) ; final TransformerFactory transFactory = TransformerFactory . newInstance ( ) ; final Transformer trans = transFactory . newTransformer ( ) ; try ( ByteArrayOutputStream baos = new ByteArrayOutputStream ( ) ) { final DOMSource source = new DOMSource ( xmldocument ) ; final PrintWriter flot = new PrintWriter ( baos ) ; final StreamResult xmlStream = new StreamResult ( flot ) ; trans . transform ( source , xmlStream ) ; return new String ( baos . toByteArray ( ) ) ; } } catch ( Throwable e ) { throw new CoreException ( SARLEclipsePlugin . getDefault ( ) . createStatus ( IStatus . ERROR , e ) ) ; }
public class PreferenceInputFactory { /** * Create a single - valued choice preference input . * @ param name * @ param label * @ param displayType * @ param options * @ param defaultValue * @ return */ public static Preference createSingleChoicePreference ( String name , String label , SingleChoiceDisplay displayType , List < Option > options , String defaultValue ) { } }
SingleChoicePreferenceInput input = new SingleChoicePreferenceInput ( ) ; input . setDefault ( defaultValue ) ; input . setDisplay ( displayType ) ; input . getOptions ( ) . addAll ( options ) ; Preference pref = new Preference ( ) ; pref . setName ( name ) ; pref . setLabel ( label ) ; pref . setPreferenceInput ( new JAXBElement < SingleChoicePreferenceInput > ( new QName ( "single-choice-parameter-input" ) , SingleChoicePreferenceInput . class , input ) ) ; return pref ;
public class DeletePolicyRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeletePolicyRequest deletePolicyRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( deletePolicyRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deletePolicyRequest . getPolicyId ( ) , POLICYID_BINDING ) ; protocolMarshaller . marshall ( deletePolicyRequest . getDeleteAllPolicyResources ( ) , DELETEALLPOLICYRESOURCES_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class CProductPersistenceImpl { /** * Returns the c products before and after the current c product in the ordered set where uuid = & # 63 ; . * @ param CProductId the primary key of the current c product * @ param uuid the uuid * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the previous , current , and next c product * @ throws NoSuchCProductException if a c product with the primary key could not be found */ @ Override public CProduct [ ] findByUuid_PrevAndNext ( long CProductId , String uuid , OrderByComparator < CProduct > orderByComparator ) throws NoSuchCProductException { } }
CProduct cProduct = findByPrimaryKey ( CProductId ) ; Session session = null ; try { session = openSession ( ) ; CProduct [ ] array = new CProductImpl [ 3 ] ; array [ 0 ] = getByUuid_PrevAndNext ( session , cProduct , uuid , orderByComparator , true ) ; array [ 1 ] = cProduct ; array [ 2 ] = getByUuid_PrevAndNext ( session , cProduct , uuid , orderByComparator , false ) ; return array ; } catch ( Exception e ) { throw processException ( e ) ; } finally { closeSession ( session ) ; }
public class LdapConnectionWrapper { /** * Retrieves a list of all the groups in the directory . * @ param dirContext a DirContext * @ return A list of Strings representing the fully qualified DN of each group * @ throws NamingException if an exception if thrown * @ since 1.4.0 */ public List < String > getGroups ( final DirContext dirContext ) throws NamingException { } }
LOGGER . debug ( "Retrieving all groups" ) ; final List < String > groupDns = new ArrayList < > ( ) ; final SearchControls sc = new SearchControls ( ) ; sc . setSearchScope ( SearchControls . SUBTREE_SCOPE ) ; final NamingEnumeration < SearchResult > ne = dirContext . search ( BASE_DN , GROUPS_FILTER , sc ) ; while ( hasMoreEnum ( ne ) ) { final SearchResult result = ne . next ( ) ; groupDns . add ( result . getNameInNamespace ( ) ) ; LOGGER . debug ( "Found group: " + result . getNameInNamespace ( ) ) ; } closeQuietly ( ne ) ; return groupDns ;
public class Dsn { /** * Extracts the scheme and additional protocol options from the DSN provided as an { @ code URI } . * @ param dsnUri DSN as an URI . */ private void extractProtocolInfo ( URI dsnUri ) { } }
String scheme = dsnUri . getScheme ( ) ; if ( scheme == null ) { return ; } String [ ] schemeDetails = scheme . split ( "\\+" ) ; protocolSettings . addAll ( Arrays . asList ( schemeDetails ) . subList ( 0 , schemeDetails . length - 1 ) ) ; protocol = schemeDetails [ schemeDetails . length - 1 ] ;
public class ListAuditFindingsResult { /** * The findings ( results ) of the audit . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setFindings ( java . util . Collection ) } or { @ link # withFindings ( java . util . Collection ) } if you want to override * the existing values . * @ param findings * The findings ( results ) of the audit . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListAuditFindingsResult withFindings ( AuditFinding ... findings ) { } }
if ( this . findings == null ) { setFindings ( new java . util . ArrayList < AuditFinding > ( findings . length ) ) ; } for ( AuditFinding ele : findings ) { this . findings . add ( ele ) ; } return this ;
public class BeanUtil { /** * Map转换为Bean对象 * @ param < T > Bean类型 * @ param map { @ link Map } * @ param beanClass Bean Class * @ param isIgnoreError 是否忽略注入错误 * @ return Bean */ public static < T > T mapToBean ( Map < ? , ? > map , Class < T > beanClass , boolean isIgnoreError ) { } }
return fillBeanWithMap ( map , ReflectUtil . newInstance ( beanClass ) , isIgnoreError ) ;
public class DoubleCheckedLocking {
    /**
     * Matches an instance of DCL. The canonical pattern is:
     * <pre>{@code
     * if ($X == null) {
     *   synchronized (...) {
     *     if ($X == null) {
     * }</pre>
     * Gaps before the synchronized or inner 'if' statement are ignored, and the operands in the
     * null-checks are accepted in either order.
     *
     * @param outerIf the candidate outer if-statement
     * @return the matched DCL description, or null if the tree does not match the pattern
     */
    @Nullable
    static DCLInfo findDCL(IfTree outerIf) {
        // TODO(cushon): Optional.ifPresent...
        // Each step narrows the match; any failure means "not a DCL" and returns null.
        ExpressionTree outerIfTest = getNullCheckedExpression(outerIf.getCondition());
        if (outerIfTest == null) {
            return null;
        }
        // The outer if's body must contain a synchronized block (gaps before it are ignored).
        SynchronizedTree synchTree = getChild(outerIf.getThenStatement(), SynchronizedTree.class);
        if (synchTree == null) {
            return null;
        }
        // The synchronized block must contain the inner null-checking if.
        IfTree innerIf = getChild(synchTree.getBlock(), IfTree.class);
        if (innerIf == null) {
            return null;
        }
        ExpressionTree innerIfTest = getNullCheckedExpression(innerIf.getCondition());
        if (innerIfTest == null) {
            return null;
        }
        // Both null-checks must refer to the same symbol, and it must be a variable.
        Symbol outerSym = ASTHelpers.getSymbol(outerIfTest);
        if (!Objects.equals(outerSym, ASTHelpers.getSymbol(innerIfTest))) {
            return null;
        }
        if (!(outerSym instanceof VarSymbol)) {
            return null;
        }
        VarSymbol var = (VarSymbol) outerSym;
        return DCLInfo.create(outerIf, synchTree, innerIf, var);
    }
}
public class ComplexNumber { /** * Get real part from the complex numbers . * @ param cn Complex numbers . * @ return Real part . */ public static double [ ] [ ] getReal ( ComplexNumber [ ] [ ] cn ) { } }
double [ ] [ ] n = new double [ cn . length ] [ cn [ 0 ] . length ] ; for ( int i = 0 ; i < n . length ; i ++ ) { for ( int j = 0 ; j < n [ 0 ] . length ; j ++ ) { n [ i ] [ j ] = cn [ i ] [ j ] . real ; } } return n ;
public class PriorityParametersImpl {
    /**
     * Sets the value of the EMF feature identified by {@code featureID}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            // Both features of this class are modelled as Parameter values.
            case BpsimPackage.PRIORITY_PARAMETERS__INTERRUPTIBLE:
                setInterruptible((Parameter) newValue);
                return;
            case BpsimPackage.PRIORITY_PARAMETERS__PRIORITY:
                setPriority((Parameter) newValue);
                return;
        }
        // Unrecognized feature ids are delegated to the superclass.
        super.eSet(featureID, newValue);
    }
}
public class Completable { /** * Returns a Completable which subscribes to this and the other Completable and completes * when both of them complete or one emits an error . * < img width = " 640 " height = " 442 " src = " https : / / raw . github . com / wiki / ReactiveX / RxJava / images / rx - operators / Completable . mergeWith . png " alt = " " > * < dl > * < dt > < b > Scheduler : < / b > < / dt > * < dd > { @ code mergeWith } does not operate by default on a particular { @ link Scheduler } . < / dd > * < / dl > * @ param other the other Completable instance * @ return the new Completable instance * @ throws NullPointerException if other is null */ @ CheckReturnValue @ SchedulerSupport ( SchedulerSupport . NONE ) public final Completable mergeWith ( CompletableSource other ) { } }
ObjectHelper . requireNonNull ( other , "other is null" ) ; return mergeArray ( this , other ) ;
public class BaseXMLBuilder { /** * Add a named attribute value to the element for this builder node . * @ param name * the attribute ' s name . * @ param value * the attribute ' s value . */ protected void attributeImpl ( String name , String value ) { } }
if ( ! ( this . xmlNode instanceof Element ) ) { throw new RuntimeException ( "Cannot add an attribute to non-Element underlying node: " + this . xmlNode ) ; } ( ( Element ) xmlNode ) . setAttribute ( name , value ) ;
public class TagLinkToken { /** * winkler scorer . Compute the Winkler heuristic as in Winkler 1999. * @ param score double * @ param S String * @ param T String * @ return double */ private double winkler ( double totalScore , String S , String T ) { } }
totalScore = totalScore + ( getPrefix ( S , T ) * 0.1 * ( 1.0 - totalScore ) ) ; return totalScore ;
public class SourceStream {
    /**
     * This method is called when a Nack message is received from the
     * downstream ME corresponding to this InternalOutputStream.
     * It sends Value and Silence messages downstream for any ticks
     * in these states in the stream, and for any ticks in Unknown or
     * Requested state it sends a Nack upstream.
     *
     * @param nm the Nack control message carrying the tick range to satisfy
     * @exception GDException Can be thrown from writeRange
     * @return null The corresponding method on the SourceStream returns
     *         the list of messages to be deleted from the ItemStream.
     *         The calling code relies on this method returning null to
     *         indicate that no messages should be deleted.
     */
    public void processNack ( ControlNack nm ) throws SIResourceException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "processNack" , nm ) ;
// Accumulators for what we discover while traversing the stream:
// Value ranges to resend, plus optional leading/trailing Silence ranges.
boolean sendPending = false ;
List sendList = new ArrayList ( ) ;
boolean sendLeadingSilence = false ;
long lsstart = 0 ;
long lsend = 0 ;
boolean sendTrailingSilence = false ;
long tsstart = 0 ;
long tsend = 0 ;
long startstamp = nm . getStartTick ( ) ;
long endstamp = nm . getEndTick ( ) ;
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { SibTr . debug ( tc , "processNack from: " + startstamp + " to " + endstamp + " on Stream " + stream ) ; }
long completedPrefix ;
// the TickRange to hold information discovered as we traverse stream
TickRange r = null ;
// Go through oststream and see which ticks in the Nack range are
// Completed or Value or Uncommitted and send appropriate messages
// for these ticks.
synchronized ( this ) {
// SIB0105
// Update the health state of this stream
getControlAdapter ( ) . getHealthState ( ) . updateHealth ( HealthStateListener . NACK_RECEIVED_STATE , HealthState . AMBER ) ;
lastNackReceivedTick = endstamp ;
inboundFlow = true ;
completedPrefix = oststream . getCompletedPrefix ( ) ;
// If some of the ticks in the Nack range are before the completedPrefix
// of our Stream we send a Silence message from the startstamp
// to the completedPrefix.
if ( startstamp <= completedPrefix ) {
sendLeadingSilence = true ;
lsstart = startstamp ;
lsend = completedPrefix ;
// Some of the ticks in the Nack range are before the completedPrefix
// of our Stream so start from there
startstamp = completedPrefix + 1 ;
}
// If there are any tick in the Nack range which are not yet
// complete process these now
if ( endstamp > completedPrefix ) {
oststream . setCursor ( startstamp ) ;
// Get the first TickRange
TickRange tr = oststream . getNext ( ) ;
TickRange tr2 = null ;
// Walk ranges until we pass endstamp or the cursor stops advancing (tr == tr2).
while ( ( tr . startstamp <= endstamp ) && ( tr != tr2 ) ) {
if ( ( tr . type == TickRange . Unknown ) || ( tr . type == TickRange . Requested ) ) { break ; }
else if ( tr . type == TickRange . Value ) {
// Do we have a previous Value message to add
// to the list
if ( sendPending ) { sendList . add ( r ) ; }
// If message is inside sendWindow
// copy the Value tick range into r
if ( msgCanBeSent ( tr . valuestamp , true ) ) { r = ( TickRange ) tr . clone ( ) ; sendPending = true ; }
else {
// If this message is outside the sendWindow we should stop
break ; }
}
else if ( tr . type == TickRange . Uncommitted ) {
// If there is a previous Value message in the list
// we can put any Completed ticks between that and this
// Uncommitted tick into it.
if ( sendPending ) {
// If there are Completed ticks between
// the Value and Uncommitted ticks
// Add them to the end of the Value message
if ( tr . valuestamp > ( r . valuestamp + 1 ) ) { r . endstamp = tr . valuestamp - 1 ; }
sendList . add ( r ) ;
sendPending = false ;
}
}
tr2 = tr ;
tr = oststream . getNext ( ) ;
} // end while
// If we finish on a Completed range then add this to the
// last Value in our list
// Check for null as we may have dropped out first time round loop
// above without ever initialising tr2
if ( ( tr2 != null ) && ( tr2 . type == TickRange . Completed ) ) {
if ( sendPending ) { r . endstamp = tr2 . endstamp ; }
else {
// Need to send this Completed range in a Silence
// message as there is no Value to add it to
// This may be because the whole Nack range is Completed or
// because the previous range was Uncommitted
// or because the previous range was outside the sendWindow
if ( msgCanBeSent ( tr2 . startstamp , true ) ) {
sendTrailingSilence = true ;
tsstart = tr2 . startstamp ;
tsend = tr2 . endstamp ;
}
}
}
if ( sendPending ) { sendList . add ( r ) ; }
}
} // end sync
// All sends happen outside the monitor to avoid holding the stream lock during I/O.
// Send any Silence at start of Nack range
if ( sendLeadingSilence ) {
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { SibTr . debug ( tc , "processNack sending Silence from: " + lsstart + " to " + lsend + " on Stream " + stream ) ; }
try { downControl . sendSilenceMessage ( lsstart , lsend , completedPrefix , true , nm . getPriority ( ) . intValue ( ) , nm . getReliability ( ) , stream ) ; }
catch ( SIResourceException e ) {
// FFDC
FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.gd.SourceStream.processNack" , "1:1222:1.138" , this ) ;
SibTr . exception ( tc , e ) ;
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "processNack" , e ) ;
throw e ;
}
}
// send any messages in Nack range
if ( sendList . size ( ) != 0 ) {
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { SibTr . debug ( tc , "processNack resending Value messages on Stream " + stream ) ; }
// second parameter indicates that this is a resend in response to a Nack
sendMsgs ( sendList , true ) ;
}
// send any Silence at end of Nack range
if ( sendTrailingSilence ) {
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { SibTr . debug ( tc , "processNack sending Silence from: " + tsstart + " to " + tsend + " on Stream " + stream ) ; }
try { downControl . sendSilenceMessage ( tsstart , tsend , completedPrefix , true , nm . getPriority ( ) . intValue ( ) , nm . getReliability ( ) , stream ) ; }
catch ( SIResourceException e ) {
// FFDC
FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.gd.SourceStream.processNack" , "1:1267:1.138" , this ) ;
SibTr . exception ( tc , e ) ;
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "processNack" , e ) ;
throw e ;
}
}
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "processNack" ) ;
public class DefaultElementProducer { /** * Create a Anchor , style it and add the data * @ param data * @ param stylers * @ return * @ throws VectorPrintException */ public Anchor createAnchor ( Object data , Collection < ? extends BaseStyler > stylers ) throws VectorPrintException { } }
return initTextElementArray ( styleHelper . style ( new Anchor ( Float . NaN ) , data , stylers ) , data , stylers ) ;
public class Single { /** * Instructs a Single to pass control to another Single rather than invoking * { @ link SingleObserver # onError ( Throwable ) } if it encounters an error . * < img width = " 640 " height = " 451 " src = " https : / / raw . github . com / wiki / ReactiveX / RxJava / images / rx - operators / Single . onErrorResumeNext . png " alt = " " > * By default , when a Single encounters an error that prevents it from emitting the expected item to * its { @ link SingleObserver } , the Single invokes its SingleObserver ' s { @ code onError } method , and then quits * without invoking any more of its SingleObserver ' s methods . The { @ code onErrorResumeNext } method changes this * behavior . If you pass another Single ( { @ code resumeSingleInCaseOfError } ) to a Single ' s * { @ code onErrorResumeNext } method , if the original Single encounters an error , instead of invoking its * SingleObserver ' s { @ code onError } method , it will instead relinquish control to { @ code resumeSingleInCaseOfError } which * will invoke the SingleObserver ' s { @ link SingleObserver # onSuccess onSuccess } method if it is able to do so . In such a case , * because no Single necessarily invokes { @ code onError } , the SingleObserver may never know that an error * happened . * You can use this to prevent errors from propagating or to supply fallback data should errors be * encountered . * < dl > * < dt > < b > Scheduler : < / b > < / dt > * < dd > { @ code onErrorResumeNext } does not operate by default on a particular { @ link Scheduler } . < / dd > * < / dl > * @ param resumeSingleInCaseOfError a Single that will take control if source Single encounters an error . * @ return the original Single , with appropriately modified behavior . * @ see < a href = " http : / / reactivex . io / documentation / operators / catch . html " > ReactiveX operators documentation : Catch < / a > */ @ CheckReturnValue @ SchedulerSupport ( SchedulerSupport . 
NONE ) public final Single < T > onErrorResumeNext ( final Single < ? extends T > resumeSingleInCaseOfError ) { } }
ObjectHelper . requireNonNull ( resumeSingleInCaseOfError , "resumeSingleInCaseOfError is null" ) ; return onErrorResumeNext ( Functions . justFunction ( resumeSingleInCaseOfError ) ) ;
public class CommerceAccountOrganizationRelLocalServiceWrapper { /** * Adds the commerce account organization rel to the database . Also notifies the appropriate model listeners . * @ param commerceAccountOrganizationRel the commerce account organization rel * @ return the commerce account organization rel that was added */ @ Override public com . liferay . commerce . account . model . CommerceAccountOrganizationRel addCommerceAccountOrganizationRel ( com . liferay . commerce . account . model . CommerceAccountOrganizationRel commerceAccountOrganizationRel ) { } }
return _commerceAccountOrganizationRelLocalService . addCommerceAccountOrganizationRel ( commerceAccountOrganizationRel ) ;
public class EConv {
    /**
     * Emits the current error bytes as numeric hex character references ({@code &#xXXXX;}).
     * The failing input is first viewed as (or converted to) UTF-32BE, then each 4-byte
     * code point is formatted and inserted into the output as US-ASCII text.
     * Mirrors CRuby's {@code output_hex_charref}.
     *
     * @return 0 on success, -1 on conversion/insertion failure
     */
    private int outputHexCharref ( ) { } }
final byte [ ] utfBytes ;
final int utfP ;
int utfLen ;
// If the failing bytes are already UTF-32BE, use them in place; otherwise transcode
// them to UTF-32BE into a scratch buffer first.
if ( caseInsensitiveEquals ( lastError . source , "UTF-32BE" . getBytes ( ) ) ) {
utfBytes = lastError . errorBytes ;
utfP = lastError . errorBytesP ;
utfLen = lastError . errorBytesLength ;
} else {
Ptr utfLenA = new Ptr ( ) ;
// TODO: better calculation?
byte [ ] utfBuf = new byte [ lastError . errorBytesLength * UTF32BEEncoding . INSTANCE . maxLength ( ) ] ;
utfBytes = allocateConvertedString ( lastError . source , "UTF-32BE" . getBytes ( ) , lastError . errorBytes , lastError . errorBytesP , lastError . errorBytesLength , utfBuf , utfLenA ) ;
if ( utfBytes == null ) return - 1 ;
utfP = 0 ;
utfLen = utfLenA . p ;
}
// UTF-32BE content must be a whole number of 4-byte code units.
if ( utfLen % 4 != 0 ) return - 1 ;
int p = utfP ;
while ( 4 <= utfLen ) {
// Reassemble one big-endian 32-bit code point from the next 4 bytes.
int u = 0 ; // long??
u += ( utfBytes [ p ] & 0xff ) << 24 ;
u += ( utfBytes [ p + 1 ] & 0xff ) << 16 ;
u += ( utfBytes [ p + 2 ] & 0xff ) << 8 ;
u += ( utfBytes [ p + 3 ] & 0xff ) ;
// Format as an XML/HTML hex character reference and splice it into the output stream.
byte [ ] charrefbuf = String . format ( "&#x%X;" , u ) . getBytes ( ) ; // FIXME: use faster sprintf??
if ( insertOutput ( charrefbuf , 0 , charrefbuf . length , "US-ASCII" . getBytes ( ) ) == - 1 ) return - 1 ;
p += 4 ;
utfLen -= 4 ;
}
return 0 ;
public class LevelOrderAxis { /** * { @ inheritDoc } */ @ Override public void reset ( final long paramNodeKey ) { } }
super . reset ( paramNodeKey ) ; mFirstChildKeyList = new LinkedList < Long > ( ) ; if ( isSelfIncluded ( ) ) { mNextKey = getNode ( ) . getDataKey ( ) ; } else { if ( ( ( ITreeStructData ) getNode ( ) ) . hasRightSibling ( ) ) { mNextKey = ( ( ITreeStructData ) getNode ( ) ) . getRightSiblingKey ( ) ; } else if ( ( ( ITreeStructData ) getNode ( ) ) . hasFirstChild ( ) ) { mNextKey = ( ( ITreeStructData ) getNode ( ) ) . getFirstChildKey ( ) ; } else { mNextKey = NULL_NODE ; } }
public class QueryBuilder { /** * Set the parent IDs that will be used to constraint the query results ; replacing any * previously configured IDs . Use the addParentIds ( ) and removeParentIds ( ) methods to * modify the existing configuration . * @ param ids the new set of parent IDs used to constrain the query results * @ return this */ public QueryBuilder parentIds ( final Set < Integer > ids ) { } }
parentIds = new HashSet < Integer > ( ) ; if ( ids != null ) { parentIds . addAll ( ids ) ; } return this ;