signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class UpdateEndpointRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( UpdateEndpointRequest updateEndpointRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( updateEndpointRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateEndpointRequest . getApplicationId ( ) , APPLICATIONID_BINDING ) ; protocolMarshaller . marshall ( updateEndpointRequest . getEndpointId ( ) , ENDPOINTID_BINDING ) ; protocolMarshaller . marshall ( updateEndpointRequest . getEndpointRequest ( ) , ENDPOINTREQUEST_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class MpJwtAppSetupUtils { /** * create app with loginConfig set to Form Login in WEB . xml , and Basic in the App * @ param server * @ throws Exception */ public void deployMicroProfileLoginConfigFormLoginInWebXmlBasicInApp ( LibertyServer server ) throws Exception { } }
List < String > classList = createAppClassListBuildAppNames ( "CommonMicroProfileMarker_FormLoginInWeb_BasicInApp" , "MicroProfileLoginConfigFormLoginInWebXmlBasicInApp" ) ; ShrinkHelper . exportAppToServer ( server , genericCreateArchiveWithJsps ( MpJwtFatConstants . LOGINCONFIG_FORM_LOGIN_IN_WEB_XML_SERVLET_BASIC_IN_APP_ROOT_CONTEXT , classList ) ) ; server . addInstalledAppForValidation ( MpJwtFatConstants . LOGINCONFIG_FORM_LOGIN_IN_WEB_XML_SERVLET_BASIC_IN_APP_ROOT_CONTEXT ) ;
public class BaseSpscLinkedArrayQueue {

    /**
     * {@inheritDoc}
     * This implementation is correct for single producer thread use only.
     * Offers {@code e} at the producer index; takes the fast path while the
     * index is below the cached buffer limit, otherwise delegates to the
     * cold path (which handles buffer growth/linking — not visible here).
     */
    @Override
    public boolean offer(final E e) {
        // Objects.requireNonNull(e);
        // Null elements are rejected: null is the queue's "empty slot" marker.
        if (null == e) {
            throw new NullPointerException();
        }
        // local load of field to avoid repeated loads after volatile reads
        final E[] buffer = producerBuffer;
        final long index = lpProducerIndex();
        final long mask = producerMask;
        final long offset = calcElementOffset(index, mask);
        // expected hot path: current buffer still has room up to the cached limit
        if (index < producerBufferLimit) {
            writeToQueue(buffer, e, index, offset);
            return true;
        }
        // Slow path: limit reached; offerColdPath re-checks capacity / resizes.
        return offerColdPath(buffer, mask, index, offset, e, null);
    }
}
public class UintMap { /** * Get object value assigned with key . * @ return key object value or null if key is absent */ public Object getObject ( int key ) { } }
if ( key < 0 ) Kit . codeBug ( ) ; if ( values != null ) { int index = findIndex ( key ) ; if ( 0 <= index ) { return values [ index ] ; } } return null ;
public class PseudoClassSpecifierChecker { /** * Add { @ code : first - child } elements . * @ see < a href = " http : / / www . w3 . org / TR / css3 - selectors / # first - child - pseudo " > < code > : first - child < / code > pseudo - class < / a > */ private void addFirstChildElements ( ) { } }
for ( Node node : nodes ) { Index index = helper . getIndexInParent ( node , false ) ; if ( index . index == 0 ) result . add ( node ) ; }
public class MultiStatementCypherSubGraphExporter { /** * - - - - Relationships - - - - */ private void exportRelationships ( PrintWriter out , Reporter reporter , int batchSize ) { } }
if ( graph . getRelationships ( ) . iterator ( ) . hasNext ( ) ) { begin ( out ) ; appendRelationships ( out , batchSize , reporter ) ; commit ( out ) ; out . flush ( ) ; }
public class HttpJsonSerializer { /** * Helper object for the format calls to wrap the JSON response in a JSONP * function if requested . Used for code dedupe . * @ param obj The object to serialize * @ return A ChannelBuffer to pass on to the query * @ throws JSONException if serialization failed */ private ChannelBuffer serializeJSON ( final Object obj ) { } }
if ( query . hasQueryStringParam ( "jsonp" ) ) { return ChannelBuffers . wrappedBuffer ( JSON . serializeToJSONPBytes ( query . getQueryStringParam ( "jsonp" ) , obj ) ) ; } return ChannelBuffers . wrappedBuffer ( JSON . serializeToBytes ( obj ) ) ;
public class ResourceConverter { /** * Registers relationship resolver for given type . Resolver will be used if relationship resolution is enabled * trough relationship annotation . * @ param resolver resolver instance * @ param type type */ public void setTypeResolver ( RelationshipResolver resolver , Class < ? > type ) { } }
if ( resolver != null ) { String typeName = ReflectionUtils . getTypeName ( type ) ; if ( typeName != null ) { typedResolvers . put ( type , resolver ) ; } }
public class MetricUtils { /** * a metric name composites of : type @ topologyId @ componentId @ taskId @ streamId @ group @ name for non - worker metrics * OR type @ topologyId @ host @ port @ group @ name for worker metrics */ public static String metricName ( String type , String topologyId , String componentId , int taskId , String streamId , String group , String name ) { } }
return concat ( type , topologyId , componentId , taskId , streamId , group , name ) ;
public class ServerTransportAcceptListener {

    /**
     * Called when we are about to accept a connection from a peer. Registers
     * the conversation in the per-connection active list (purging any closed
     * conversations first), installs a connection-closed listener if none is
     * present, attaches per-conversation and link-level state, and returns the
     * receive listener to use.
     *
     * @param cfConversation the conversation being accepted
     * @return the shared server transport receive listener
     */
    public ConversationReceiveListener acceptConnection(Conversation cfConversation) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "acceptConnection", cfConversation);
        // Add this conversation to our active conversations list
        synchronized (activeConversations) {
            Object connectionReference = cfConversation.getConnectionReference();
            ArrayList list = (ArrayList) activeConversations.get(connectionReference);
            if (list == null) {
                // First conversation on this connection: create its tracking list.
                list = new ArrayList();
                activeConversations.put(connectionReference, list);
            } else {
                // This is mank - but if TRM does a redirect it is possible that we may get a
                // connection but then they close the Conversation directly without sending us any
                // kind of close flow. As such, every time we connect we should have a check on all
                // the conversations to ensure they are not closed, and if they are remove them from
                // the list.
                ArrayList removeList = new ArrayList();
                for (int x = 0; x < list.size(); x++) {
                    Conversation conv = (Conversation) list.get(x);
                    if (conv.isClosed()) {
                        removeList.add(conv);
                    }
                }
                // Actually do the remove... (two-pass to avoid mutating while scanning)
                for (int x = 0; x < removeList.size(); x++) {
                    list.remove(removeList.get(x));
                }
            }
            list.add(cfConversation);
            // At this point we have a look to see if the connection closed listener has been set.
            // If it has not been set then this is a new connection (or one with no active
            // Conversations on it). We must set ourselves as the listener in the event that the
            // socket terminates before any SI connections are established and so any cleanup
            // required.
            // Note that once a connection to the ME is established, this listener is overwritten
            // with one in ServerSideConnection. This also performs the same cleanup but also does
            // MFP cleanup as well (which is not appropriate if the connection goes down at this
            // stage).
            if (cfConversation.getConnectionClosedListener(ConversationUsageType.JFAP) == null) {
                cfConversation.addConnectionClosedListener(this, ConversationUsageType.JFAP);
            }
        }
        // Lazily attach per-conversation state the first time we see it.
        if (cfConversation.getAttachment() == null) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(tc, "Creating conversation state");
            cfConversation.setAttachment(new ConversationState());
        }
        // Likewise for state shared across the whole link.
        if (cfConversation.getLinkLevelAttachment() == null) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(tc, "Creating link level state");
            cfConversation.setLinkLevelAttachment(new ServerLinkLevelState());
        }
        // Set a hint that this conversation is being used for ME to client communications.
        cfConversation.setConversationType(Conversation.CLIENT);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "acceptConnection", serverTransportReceiveListener);
        return serverTransportReceiveListener;
    }
}
public class ConsonantUtil { /** * * * * * * BEGINNING OF FUNCTION * * * * * */ / * / public static boolean is_jhalanta ( String str ) { } }
// System . out . print ( " Entered is _ jhalanta , returning : " ) ; String s1 = VarnaUtil . getAntyaVarna ( str ) ; if ( is_jhal ( s1 ) ) { // Log . logInfo ( " true " ) ; return true ; } // Log . logInfo ( " false " ) ; return false ;
public class NormOps_DDRM { /** * Computes the Frobenius matrix norm : < br > * < br > * normF = Sqrt { & sum ; < sub > i = 1 : m < / sub > & sum ; < sub > j = 1 : n < / sub > { a < sub > ij < / sub > < sup > 2 < / sup > } } * This is equivalent to the element wise p = 2 norm . See { @ link # fastNormF } for another implementation * that is faster , but more prone to underflow / overflow errors . * @ param a The matrix whose norm is computed . Not modified . * @ return The norm ' s value . */ public static double normF ( DMatrixD1 a ) { } }
double total = 0 ; double scale = CommonOps_DDRM . elementMaxAbs ( a ) ; if ( scale == 0.0 ) return 0.0 ; final int size = a . getNumElements ( ) ; for ( int i = 0 ; i < size ; i ++ ) { double val = a . get ( i ) / scale ; total += val * val ; } return scale * Math . sqrt ( total ) ;
public class S3Sample {

    /**
     * Creates a temporary file with text data to demonstrate uploading a file
     * to Amazon S3.
     *
     * @return A newly created temporary file with text data.
     * @throws IOException if the temporary file cannot be created or written
     */
    private static File createSampleFile() throws IOException {
        File file = File.createTempFile("aws-java-sdk-", ".txt");
        file.deleteOnExit();
        // try-with-resources guarantees the writer (and underlying stream) is
        // closed even when a write throws; the original leaked it on exception.
        try (Writer writer = new OutputStreamWriter(new FileOutputStream(file))) {
            writer.write("abcdefghijklmnopqrstuvwxyz\n");
            writer.write("01234567890112345678901234\n");
            writer.write("!@#$%^&*()-=[]{};':',.<>/?\n");
            writer.write("01234567890112345678901234\n");
            writer.write("abcdefghijklmnopqrstuvwxyz\n");
        }
        return file;
    }
}
public class ButtonTemplate {

    /**
     * Builds the Facebook button-template message as a JSON string.
     * The payload is assembled by plain string concatenation; trailing commas
     * are trimmed with the regex replaceAll(",$", "") after each section.
     * NOTE(review): field values are not JSON-escaped — a quote or backslash in
     * message_text / button fields would corrupt the output; confirm callers
     * sanitize input.
     *
     * @return String the final message
     */
    public String build() {
        this.message_string = "{";
        if (this.recipient_id != null) {
            this.message_string += "\"recipient\": {\"id\": \"" + this.recipient_id + "\"},";
        }
        // Only emit the message body when there is text AND at least one button.
        if ((this.message_text != null) && !(this.message_text.equals("")) && !(this.buttons.isEmpty())) {
            this.message_string += "\"message\": {";
            this.message_string += "\"attachment\": {";
            this.message_string += "\"type\": \"template\",";
            this.message_string += "\"payload\": {";
            this.message_string += "\"template_type\": \"button\",";
            this.message_string += "\"text\": \"" + this.message_text + "\",";
            this.message_string += "\"buttons\":[";
            for (int j = 0; j < this.buttons.size(); j++) {
                HashMap<String, String> button = this.buttons.get(j);
                this.message_string += "{";
                // Empty-valued fields are skipped entirely.
                if (!button.get("type").equals("")) {
                    this.message_string += "\"type\":\"" + button.get("type") + "\",";
                }
                if (!button.get("title").equals("")) {
                    this.message_string += "\"title\":\"" + button.get("title") + "\",";
                }
                if (!button.get("url").equals("")) {
                    this.message_string += "\"url\":\"" + button.get("url") + "\",";
                }
                if (!button.get("payload").equals("")) {
                    this.message_string += "\"payload\":\"" + button.get("payload") + "\",";
                }
                // Trim the comma left after the last emitted field of this button.
                this.message_string = this.message_string.replaceAll(",$", "");
                this.message_string += "},";
            }
            // Trim the comma after the last button object.
            this.message_string = this.message_string.replaceAll(",$", "");
            this.message_string += "]";
            this.message_string += "}";
            this.message_string += "}";
            this.message_string += "}";
        }
        // Trim a dangling comma when only the recipient section was emitted.
        this.message_string = this.message_string.replaceAll(",$", "");
        this.message_string += "}";
        return this.message_string;
    }
}
public class IndexHelper {

    /**
     * The index of the IndexInfo in which a scan starting with {@code name}
     * should begin.
     *
     * @param name name of the index (empty means "start from the edge")
     * @param indexList list of the IndexInfo objects
     * @param comparator comparator type
     * @param reversed is name reversed (reverse slice)
     * @param lastIndex index returned by a previous call, or negative if none
     * @return int index into indexList, or -1 when past the end
     */
    public static int indexFor(Composite name, List<IndexInfo> indexList, CType comparator, boolean reversed, int lastIndex) {
        // Empty name: resume from lastIndex, else start at the scan-direction edge.
        if (name.isEmpty())
            return lastIndex >= 0 ? lastIndex : reversed ? indexList.size() - 1 : 0;
        if (lastIndex >= indexList.size())
            return -1;
        IndexInfo target = new IndexInfo(name, name, 0, 0);
        /*
        Take the example from the unit test, and say your index looks like this:
        [0..5] [10..15] [20..25]
        and you look for the slice [13..17].

        When doing a forward slice, we are doing a binary search comparing 13 (the start of the query)
        to the lastName part of the index slot. You'll end up with the "first" slot, going from left to
        right, that may contain the start.

        When doing a reverse slice, we do the same thing, only using as a start column the end of the
        query, i.e. 17 in this example, compared to the firstName part of the index slots. bsearch will
        give us the first slot where firstName > start ([20..25] here), so we subtract an extra one to
        get the slot just before.
        */
        int startIdx = 0;
        List<IndexInfo> toSearch = indexList;
        // Restrict the search window to what a previous call already ruled out.
        if (lastIndex >= 0) {
            if (reversed) {
                toSearch = indexList.subList(0, lastIndex + 1);
            } else {
                startIdx = lastIndex;
                toSearch = indexList.subList(lastIndex, indexList.size());
            }
        }
        int index = Collections.binarySearch(toSearch, target, getComparator(comparator, reversed));
        // binarySearch returns -(insertionPoint) - 1 on a miss; convert back,
        // stepping one extra slot left for reverse scans (see comment above).
        return startIdx + (index < 0 ? -index - (reversed ? 2 : 1) : index);
    }
}
public class SequenceFile { /** * Construct the preferred type of ' raw ' SequenceFile Writer . * @ param conf The configuration . * @ param out The stream on top which the writer is to be constructed . * @ param keyClass The ' key ' type . * @ param valClass The ' value ' type . * @ param compressionType The compression type . * @ param codec The compression codec . * @ return Returns the handle to the constructed SequenceFile Writer . * @ throws IOException */ public static Writer createWriter ( Configuration conf , FSDataOutputStream out , Class keyClass , Class valClass , CompressionType compressionType , CompressionCodec codec ) throws IOException { } }
Writer writer = createWriter ( conf , out , keyClass , valClass , compressionType , codec , new Metadata ( ) ) ; return writer ;
public class CmsDefaultXmlContentHandler { /** * Initializes the default values for this content handler . < p > * Using the default values from the appinfo node , it ' s possible to have more * sophisticated logic for generating the defaults then just using the XML schema " default " * attribute . < p > * @ param root the " defaults " element from the appinfo node of the XML content definition * @ param contentDefinition the content definition the default values belong to * @ throws CmsXmlException if something goes wrong */ protected void initDefaultValues ( Element root , CmsXmlContentDefinition contentDefinition ) throws CmsXmlException { } }
Iterator < Element > i = CmsXmlGenericWrapper . elementIterator ( root , APPINFO_DEFAULT ) ; while ( i . hasNext ( ) ) { // iterate all " default " elements in the " defaults " node Element element = i . next ( ) ; String elementName = element . attributeValue ( APPINFO_ATTR_ELEMENT ) ; String defaultValue = element . attributeValue ( APPINFO_ATTR_VALUE ) ; String resolveMacrosValue = element . attributeValue ( APPINFO_ATTR_RESOLVE_MACROS ) ; if ( ( elementName != null ) && ( defaultValue != null ) ) { // add a default value mapping for the element addDefault ( contentDefinition , elementName , defaultValue , resolveMacrosValue ) ; } }
public class DisplayUtil { /** * Returns the height of the device ' s display . * @ param context * The context , which should be used , as an instance of the class { @ link Context } . The * context may not be null * @ return The height of the device ' s display in pixels as an { @ link Integer } value */ public static int getDisplayHeight ( @ NonNull final Context context ) { } }
Condition . INSTANCE . ensureNotNull ( context , "The context may not be null" ) ; return context . getResources ( ) . getDisplayMetrics ( ) . heightPixels ;
public class LocationIndexTree {

    /**
     * Returns the spatial key for (lat, lon) in reverse bit order, which makes
     * subsequent right-shifting of the key cheaper during tree traversal.
     * The key is produced by {@code keyAlgo.encode} and reversed over the
     * number of bits the key algorithm actually uses.
     */
    final long createReverseKey(double lat, double lon) {
        return BitUtil.BIG.reverse(keyAlgo.encode(lat, lon), keyAlgo.getBits());
    }
}
public class InjectorImpl {

    /**
     * Instantiates an object according to the given injection plan, recursing
     * for constructor arguments and caching constructed instances.
     *
     * This gets really nasty now that constructors can invoke operations on us.
     * The upshot is that we should check to see if instances have been
     * registered by callees after each recursive invocation of injectFromPlan
     * or constructor invocations. The error handling currently bails if the
     * thing we just instantiated should be discarded. This could happen if
     * (for instance) a constructor did a bindVolatileInstance of its own class
     * to an instance, or somehow triggered an injection of itself with a
     * different plan (an injection of itself with the same plan would lead to
     * an infinite recursion, so it's not really our problem).
     *
     * @param plan the injection plan to realize
     * @return the injected instance
     * @throws InjectionException if the plan is infeasible, ambiguous, or a
     *         constructor/class lookup fails
     */
    @SuppressWarnings("unchecked")
    private <T> T injectFromPlan(final InjectionPlan<T> plan) throws InjectionException {
        if (!plan.isFeasible()) {
            throw new InjectionException("Cannot inject " + plan.getNode().getFullName() + ": " + plan.toCantInjectString());
        }
        if (plan.isAmbiguous()) {
            throw new InjectionException("Cannot inject " + plan.getNode().getFullName() + " " + plan.toCantInjectString());
        }
        if (plan instanceof InjectionFuturePlan) {
            // Futures are created now but resolved later; track them for wiring.
            final InjectionFuturePlan<T> fut = (InjectionFuturePlan<T>) plan;
            final String key = fut.getNode().getFullName();
            try {
                final InjectionFuture<?> ret = new InjectionFuture<>(this, javaNamespace.classForName(fut.getNode().getFullName()));
                pendingFutures.add(ret);
                return (T) ret;
            } catch (final ClassNotFoundException e) {
                throw new InjectionException("Could not get class for " + key, e);
            }
        } else if (plan.getNode() instanceof ClassNode && null != getCachedInstance((ClassNode<T>) plan.getNode())) {
            // Singleton-style reuse: an instance was already constructed.
            return getCachedInstance((ClassNode<T>) plan.getNode());
        } else if (plan instanceof JavaInstance) {
            // TODO: Must be named parameter node. Check.
            return ((JavaInstance<T>) plan).getInstance();
        } else if (plan instanceof Constructor) {
            final Constructor<T> constructor = (Constructor<T>) plan;
            final Object[] args = new Object[constructor.getArgs().length];
            final InjectionPlan<?>[] argPlans = constructor.getArgs();
            // Recursively inject each constructor argument first.
            for (int i = 0; i < argPlans.length; i++) {
                args[i] = injectFromPlan(argPlans[i]);
            }
            try {
                // Guard against re-entrant modification while user code runs.
                concurrentModificationGuard = true;
                T ret;
                try {
                    final ConstructorDef<T> def = constructor.getConstructorDef();
                    final java.lang.reflect.Constructor<T> construct = getConstructor(def);
                    // The aspect, when present, intercepts instantiation.
                    if (aspect != null) {
                        ret = aspect.inject(def, construct, args);
                    } else {
                        ret = construct.newInstance(args);
                    }
                } catch (final IllegalArgumentException e) {
                    final StringBuilder sb = new StringBuilder("Internal Tang error? Could not call constructor " + constructor.getConstructorDef() + " with arguments [");
                    for (final Object o : args) {
                        sb.append("\n\t" + o);
                    }
                    sb.append("]");
                    throw new IllegalStateException(sb.toString(), e);
                }
                // ExternalConstructor wrappers produce the real instance lazily.
                if (ret instanceof ExternalConstructor) {
                    ret = ((ExternalConstructor<T>) ret).newInstance();
                }
                instances.put(constructor.getNode(), ret);
                return ret;
            } catch (final ReflectiveOperationException e) {
                // Unwrap InvocationTargetException so callers see the real cause.
                throw new InjectionException("Could not invoke constructor: " + plan, e instanceof InvocationTargetException ? e.getCause() : e);
            } finally {
                concurrentModificationGuard = false;
            }
        } else if (plan instanceof Subplan) {
            final Subplan<T> ambiguous = (Subplan<T>) plan;
            return injectFromPlan(ambiguous.getDelegatedPlan());
        } else if (plan instanceof SetInjectionPlan) {
            final SetInjectionPlan<T> setPlan = (SetInjectionPlan<T>) plan;
            final Set<T> ret = new MonotonicHashSet<>();
            for (final InjectionPlan<T> subplan : setPlan.getEntryPlans()) {
                ret.add(injectFromPlan(subplan));
            }
            return (T) ret;
        } else if (plan instanceof ListInjectionPlan) {
            final ListInjectionPlan<T> listPlan = (ListInjectionPlan<T>) plan;
            final List<T> ret = new ArrayList<>();
            for (final InjectionPlan<T> subplan : listPlan.getEntryPlans()) {
                ret.add(injectFromPlan(subplan));
            }
            return (T) ret;
        } else {
            throw new IllegalStateException("Unknown plan type: " + plan);
        }
    }
}
public class N { /** * Mostly it ' s designed for one - step operation to complete the operation in one step . * < code > java . util . stream . Stream < / code > is preferred for multiple phases operation . * @ param a * @ param fromIndex * @ param toIndex * @ param func * @ return */ public static < T , R , E extends Exception > List < R > map ( final T [ ] a , final int fromIndex , final int toIndex , final Try . Function < ? super T , ? extends R , E > func ) throws E { } }
checkFromToIndex ( fromIndex , toIndex , len ( a ) ) ; N . checkArgNotNull ( func ) ; if ( N . isNullOrEmpty ( a ) ) { return new ArrayList < > ( ) ; } final List < R > result = new ArrayList < > ( toIndex - fromIndex ) ; for ( int i = fromIndex ; i < toIndex ; i ++ ) { result . add ( func . apply ( a [ i ] ) ) ; } return result ;
public class AnimaQuery { /** * Set the column name using lambda * @ param function lambda expressions , use the Model : : getXXX * @ param < R > * @ return AnimaQuery */ public < R > AnimaQuery < T > where ( TypeFunction < T , R > function ) { } }
String columnName = AnimaUtils . getLambdaColumnName ( function ) ; conditionSQL . append ( " AND " ) . append ( columnName ) ; return this ;
public class Project { /** * Get SecondaryWorkitems in this Project filtered as specified in the * passed in filter . * @ param filter Criteria to filter on . Project will be set automatically . * If null , all tasks and tests in the project are returned . * @ param includeSubprojects Specifies whether to include items from sub * project or not . This only adds open subprojects . * @ return An Collection of SecondaryWorkitem . */ public Collection < SecondaryWorkitem > getSecondaryWorkitems ( SecondaryWorkitemFilter filter , boolean includeSubprojects ) { } }
filter = ( filter != null ) ? filter : new SecondaryWorkitemFilter ( ) ; return getInstance ( ) . get ( ) . secondaryWorkitems ( getFilter ( filter , includeSubprojects ) ) ;
public class DateBag { /** * Wraps a given date in a format that can be easily unbagged */ @ Override public void bag ( Date unBaggedObject ) { } }
if ( unBaggedObject == null ) this . time = 0 ; else this . time = unBaggedObject . getTime ( ) ;
public class GetContentModerationResult { /** * The detected moderation labels and the time ( s ) they were detected . * @ param moderationLabels * The detected moderation labels and the time ( s ) they were detected . */ public void setModerationLabels ( java . util . Collection < ContentModerationDetection > moderationLabels ) { } }
if ( moderationLabels == null ) { this . moderationLabels = null ; return ; } this . moderationLabels = new java . util . ArrayList < ContentModerationDetection > ( moderationLabels ) ;
public class TableWriterServiceImpl {

    /**
     * Opens a new segment writer. The segment's sequence id will be the next
     * sequence number. Triggers a GC pass first when one is due, resetting the
     * since-GC counter.
     *
     * @return the new segment writer
     */
    public OutSegment openWriter() {
        if (isGcRequired()) {
            // _gcSequence = _seqGen.get();
            // GC everything up to the current sequence before opening a new segment.
            _table.getGcService().gc(_seqGen.get());
            _seqSinceGcCount = 0;
        }
        _seqSinceGcCount++;
        // NOTE(review): _seqGen.get() is called again here; assumes get() is a
        // pure read (not an increment), so both calls see the same value — confirm.
        long sequence = _seqGen.get();
        return openWriterSeq(sequence);
    }
}
public class Order {

    /**
     * Writes this order's members to a MwsWriter. Field order matches the MWS
     * Order schema and must be preserved for correct serialization.
     *
     * @param w The writer to write to.
     */
    @Override
    public void writeFragmentTo(MwsWriter w) {
        w.write("AmazonOrderId", amazonOrderId);
        w.write("SellerOrderId", sellerOrderId);
        w.write("PurchaseDate", purchaseDate);
        w.write("LastUpdateDate", lastUpdateDate);
        w.write("OrderStatus", orderStatus);
        w.write("FulfillmentChannel", fulfillmentChannel);
        w.write("SalesChannel", salesChannel);
        w.write("OrderChannel", orderChannel);
        w.write("ShipServiceLevel", shipServiceLevel);
        w.write("ShippingAddress", shippingAddress);
        w.write("OrderTotal", orderTotal);
        w.write("NumberOfItemsShipped", numberOfItemsShipped);
        w.write("NumberOfItemsUnshipped", numberOfItemsUnshipped);
        // List field uses the element-name overload for its repeated items.
        w.writeList("PaymentExecutionDetail", "PaymentExecutionDetailItem", paymentExecutionDetail);
        w.write("PaymentMethod", paymentMethod);
        w.write("MarketplaceId", marketplaceId);
        w.write("BuyerEmail", buyerEmail);
        w.write("BuyerName", buyerName);
        w.write("ShipmentServiceLevelCategory", shipmentServiceLevelCategory);
        w.write("ShippedByAmazonTFM", shippedByAmazonTFM);
        w.write("TFMShipmentStatus", tfmShipmentStatus);
        w.write("CbaDisplayableShippingLabel", cbaDisplayableShippingLabel);
        w.write("OrderType", orderType);
        w.write("EarliestShipDate", earliestShipDate);
        w.write("LatestShipDate", latestShipDate);
        w.write("EarliestDeliveryDate", earliestDeliveryDate);
        w.write("LatestDeliveryDate", latestDeliveryDate);
    }
}
public class UpdateIdentityPoolResult { /** * An array of Amazon Resource Names ( ARNs ) of the SAML provider for your identity pool . * @ param samlProviderARNs * An array of Amazon Resource Names ( ARNs ) of the SAML provider for your identity pool . */ public void setSamlProviderARNs ( java . util . Collection < String > samlProviderARNs ) { } }
if ( samlProviderARNs == null ) { this . samlProviderARNs = null ; return ; } this . samlProviderARNs = new java . util . ArrayList < String > ( samlProviderARNs ) ;
public class FlowControllerFactory { /** * Get a { @ link FlowControllerFactory } . The instance returned may or may not have been cached . * @ param servletContext the current { @ link ServletContext } . * @ return a { @ link FlowControllerFactory } for the given { @ link ServletContext } . */ public static FlowControllerFactory get ( ServletContext servletContext ) { } }
FlowControllerFactory factory = ( FlowControllerFactory ) servletContext . getAttribute ( CONTEXT_ATTR ) ; assert factory != null : FlowControllerFactory . class . getName ( ) + " was not found in ServletContext attribute " + CONTEXT_ATTR ; factory . reinit ( servletContext ) ; return factory ;
public class XWikiDOMSerializer { /** * Create the DOM given a rootNode and a document builder . * This method is a replica of { @ link DomSerializer # createDOM ( TagNode ) } excepts that it requires to give a * DocumentBuilder . * @ param documentBuilder the { @ link DocumentBuilder } instance to use , DocumentBuilder is not guaranteed to * be thread safe so at most the safe instance should be used only in the same thread * @ param rootNode the HTML Cleaner root node to serialize * @ return the W3C Document object * @ throws ParserConfigurationException if there ' s an error during serialization */ public Document createDOM ( DocumentBuilder documentBuilder , TagNode rootNode ) throws ParserConfigurationException { } }
Document document = createDocument ( documentBuilder , rootNode ) ; createSubnodes ( document , document . getDocumentElement ( ) , rootNode . getAllChildren ( ) ) ; return document ;
public class StreamSnapshotSink { /** * Assemble the chunk so that it can be used to construct the VoltTable that * will be passed to EE . * @ param buf * @ return */ public static ByteBuffer getNextChunk ( byte [ ] schemaBytes , ByteBuffer buf , CachedByteBufferAllocator resultBufferAllocator ) { } }
buf . position ( buf . position ( ) + 4 ) ; // skip partition id int length = schemaBytes . length + buf . remaining ( ) ; ByteBuffer outputBuffer = resultBufferAllocator . allocate ( length ) ; outputBuffer . put ( schemaBytes ) ; outputBuffer . put ( buf ) ; outputBuffer . flip ( ) ; return outputBuffer ;
public class SmartTable { /** * Removes the specified style names on the specified row and column . */ public void removeStyleNames ( int row , int column , String ... styles ) { } }
for ( String style : styles ) { getFlexCellFormatter ( ) . removeStyleName ( row , column , style ) ; }
public class JsonGenerator { /** * TODO dirty and incorrect for overlaps */ private void mergeOneOf ( ) { } }
if ( SchemaCompositeMatchers . hasOneOf ( ) . matches ( schema ) ) { SchemaList list = schema . getOneOf ( ) ; Schema choice = list . get ( rnd . nextInt ( list . size ( ) ) ) ; schema . getJson ( ) . remove ( "oneOf" ) ; schema . merge ( choice ) ; }
public class TypeQualifierApplications {

    /**
     * Computes (and caches) the effective type qualifier annotation for the
     * given parameter of the given method, resolving in priority order:
     * (1) direct annotation, (2) inherited annotation for instance methods,
     * (3) default/outer-scope annotation. Varargs' trailing parameter and
     * synthetic parameters never receive qualifiers.
     *
     * @param typeQualifierValue the qualifier being resolved
     * @param xmethod the method whose parameter is examined
     * @param parameter zero-based parameter index
     * @return the effective annotation, or null if none applies
     */
    private static TypeQualifierAnnotation computeEffectiveTypeQualifierAnnotation(TypeQualifierValue<?> typeQualifierValue, XMethod xmethod, int parameter) {
        if (DEBUG) {
            if (typeQualifierValue.value != null) {
                System.out.println(" Value is " + typeQualifierValue.value + "(" + typeQualifierValue.value.getClass().toString() + ")");
            }
        }
        Map<TypeQualifierValue<?>, DualKeyHashMap<XMethod, Integer, TypeQualifierAnnotation>> effectiveParameterAnnotations = getEffectiveParameterAnnotations();
        DualKeyHashMap<XMethod, Integer, TypeQualifierAnnotation> map = effectiveParameterAnnotations.get(typeQualifierValue);
        if (map == null) {
            if (DEBUG) {
                System.out.println("computeEffectiveTypeQualifierAnnotation: Creating map for " + typeQualifierValue);
            }
            map = new DualKeyHashMap<>();
            effectiveParameterAnnotations.put(typeQualifierValue, map);
        }
        // Check cached answer
        TypeQualifierAnnotation result;
        if (map.containsKey(xmethod, parameter)) {
            result = map.get(xmethod, parameter);
        } else {
            if (DEBUG) {
                System.out.println("Looking up application of " + typeQualifierValue + " on " + xmethod + " parameter " + parameter);
            }
            // (Old commented-out duplicate-computation debugging scaffold removed.)
            // Compute answer
            TypeQualifierAnnotation tqa;
            if (xmethod.isVarArgs() && parameter == xmethod.getNumParams() - 1) {
                tqa = null;
                if (DEBUG) {
                    System.out.print(" vararg parameters don't get type qualifiers");
                }
            } else {
                // Check direct application
                if (DEBUG) {
                    System.out.print(" (1) Checking direct application...");
                }
                tqa = getDirectTypeQualifierAnnotation(xmethod, parameter, typeQualifierValue);
                if (DEBUG) {
                    System.out.println(tqa != null ? "FOUND" : "none");
                }
                // If it's an instance method, check for inherited annotation
                if (tqa == null && !xmethod.isStatic() && !xmethod.isPrivate() && !"<init>".equals(xmethod.getName())) {
                    if (DEBUG) {
                        System.out.print(" (2) Checking inherited...");
                    }
                    tqa = getInheritedTypeQualifierAnnotation(xmethod, parameter, typeQualifierValue);
                    if (DEBUG) {
                        if (tqa == TypeQualifierAnnotation.OVERRIDES_BUT_NO_ANNOTATION) {
                            System.out.println("Overrides, no annotation inherited");
                        } else if (tqa != null) {
                            System.out.println("Inherited " + tqa.when);
                        } else {
                            System.out.println("Nothing inherited");
                        }
                    }
                }
                // OVERRIDES_BUT_NO_ANNOTATION is a sentinel, not a real answer:
                // remember it happened and fall through to the default lookup.
                boolean overriddenMethod = false;
                if (tqa == TypeQualifierAnnotation.OVERRIDES_BUT_NO_ANNOTATION) {
                    overriddenMethod = true;
                    tqa = null;
                }
                // Check for default (outer scope) annotation
                if (tqa == null) {
                    // Synthetic slot index accounts for the implicit 'this' of instance methods.
                    if (xmethod.isVariableSynthetic((xmethod.isStatic() ? 0 : 1) + parameter)) {
                        if (DEBUG) {
                            System.out.print(" (3) Skipping default for synthetic parameter");
                        }
                    } else {
                        if (DEBUG) {
                            System.out.print(" (3) Checking default...");
                        }
                        tqa = getDefaultTypeQualifierAnnotationForParameters(xmethod, typeQualifierValue, overriddenMethod);
                        if (DEBUG) {
                            System.out.println(tqa != null ? "FOUND" : "none");
                        }
                    }
                }
            }
            // Cache answer (null is a valid, cached result)
            result = tqa;
            map.put(xmethod, parameter, result);
            if (DEBUG) {
                if (result == null) {
                    System.out.println(" => Answer: no annotation on parameter " + parameter + " of " + xmethod);
                } else {
                    System.out.println(" => Answer: " + result.when + " on parameter " + parameter + " of " + xmethod);
                }
            }
        }
        // Sanity check: the cache must now hold an entry for this key pair.
        if (!map.containsKey(xmethod, parameter)) {
            throw new IllegalStateException("Did not populate cache?");
        }
        // Return cached answer
        return result;
    }
}
public class CPSpecificationOptionPersistenceImpl { /** * Removes all the cp specification options where CPOptionCategoryId = & # 63 ; from the database . * @ param CPOptionCategoryId the cp option category ID */ @ Override public void removeByCPOptionCategoryId ( long CPOptionCategoryId ) { } }
// Load every cp specification option attached to the given option
// category (ALL_POS bounds disable paging; null comparator keeps the
// finder's default ordering) and remove each one through the standard
// persistence remove path so listeners/caches stay consistent.
for (CPSpecificationOption option :
        findByCPOptionCategoryId(
            CPOptionCategoryId, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null)) {

    remove(option);
}
public class EventWaitActivity { /** * Method that executes the logic based on the work */ public void execute ( ) throws ActivityException { } }
// Register the wait events for this activity; a non-null result means a
// matching event has already arrived and can be consumed immediately.
EventWaitInstance received = registerWaitEvents(false, true);
if (received != null) {
    // Event already present: record its completion code, process the
    // stored message document, then run completion-code handling.
    setReturnCodeAndExitStatus(received.getCompletionCode());
    processMessage(getExternalEventInstanceDetails(received.getMessageDocumentId()));
    boolean toFinish = handleCompletionCode();
    // Only mark COMPLETED if the handler did not set an exit status itself.
    if (toFinish && exitStatus == null)
        exitStatus = WorkStatus.STATUS_COMPLETED;
} else {
    try {
        // set timeouts
        int timeout = getTimeoutSeconds();
        if (timeout > 0) {
            loginfo("set activity timeout as " + timeout + " seconds");
            // Schedule a delayed internal event that will fire after the
            // timeout, keyed by this activity instance id.
            InternalEvent delayMsg = InternalEvent.createActivityDelayMessage(getActivityInstance(), getMasterRequestId());
            getEngine().sendDelayedInternalEvent(delayMsg, timeout,
                ScheduledEvent.INTERNAL_EVENT_PREFIX + getActivityInstanceId() + "timeout", false);
        }
    } catch (MdwException e) {
        // Surface scheduling failures as activity failures, preserving the cause.
        throw new ActivityException(0, "Failed to set timeout", e);
    }
}
public class DefaultDecoder { /** * Options returned by this method are configured with mDecodeBuffer which is GuardedBy ( " this " ) */ private static BitmapFactory . Options getDecodeOptionsForStream ( EncodedImage encodedImage , Bitmap . Config bitmapConfig ) { } }
final BitmapFactory . Options options = new BitmapFactory . Options ( ) ; // Sample size should ONLY be different than 1 when downsampling is enabled in the pipeline options . inSampleSize = encodedImage . getSampleSize ( ) ; options . inJustDecodeBounds = true ; // fill outWidth and outHeight BitmapFactory . decodeStream ( encodedImage . getInputStream ( ) , null , options ) ; if ( options . outWidth == - 1 || options . outHeight == - 1 ) { throw new IllegalArgumentException ( ) ; } options . inJustDecodeBounds = false ; options . inDither = true ; options . inPreferredConfig = bitmapConfig ; options . inMutable = true ; return options ;
public class TimelineModel { /** * Deletes a given event in the model with UI update . * @ param event event to be deleted * @ param timelineUpdater TimelineUpdater instance to delete the event in UI */ public void delete ( TimelineEvent event , TimelineUpdater timelineUpdater ) { } }
// Locate the event first; a negative index means it is not part of
// this model and there is nothing to delete.
final int position = getIndex(event);
if (position < 0) {
    return;
}

events.remove(event);

if (timelineUpdater != null) {
    // Mirror the removal in the rendered timeline widget.
    timelineUpdater.delete(position);
}
public class ExcelWriter { /** * 为指定的key列表添加标题别名 , 如果没有定义key的别名 , 在onlyAlias为false时使用原key * @ param keys 键列表 * @ return 别名列表 */ private Map < ? , ? > aliasMap ( Map < ? , ? > rowMap ) { } }
// No aliases configured: return the row untouched.
if (MapUtil.isEmpty(this.headerAlias)) {
    return rowMap;
}

// LinkedHashMap preserves the original column order.
final Map<Object, Object> filteredMap = new LinkedHashMap<>();
String aliasName;
for (Entry<?, ?> entry : rowMap.entrySet()) {
    aliasName = this.headerAlias.get(entry.getKey());
    if (null != aliasName) {
        // Key has an alias: store the value under the alias instead.
        filteredMap.put(aliasName, entry.getValue());
    } else if (!this.onlyAlias) {
        // No alias defined: keep the original key unless the writer is
        // configured to emit aliased columns only.
        // (Fix: replaced the non-idiomatic `false == this.onlyAlias`.)
        filteredMap.put(entry.getKey(), entry.getValue());
    }
}
return filteredMap;
public class Props { /** * Store only those properties defined at this local level * @ param out The output stream to write to * @ throws IOException If the file can ' t be found or there is an io error */ public void storeLocal ( final OutputStream out ) throws IOException { } }
// Copy only this level's keys (not inherited/parent scopes) into a
// Properties object, then serialize it with no header comment.
final Properties localOnly = new Properties();
this._current.keySet().forEach(key -> localOnly.setProperty(key, get(key)));
localOnly.store(out, null);
public class KeyVaultClientCustomImpl { /** * List secrets in the specified vault . * @ param vaultBaseUrl * The vault name , e . g . https : / / myvault . vault . azure . net * @ param serviceCallback * the async ServiceCallback to handle successful and failed * responses . * @ return the { @ link ServiceFuture } object */ public ServiceFuture < List < SecretItem > > listSecretsAsync ( final String vaultBaseUrl , final ListOperationCallback < SecretItem > serviceCallback ) { } }
// Secrets listing is implemented by the generated getSecretsAsync
// operation; delegate directly, forwarding the caller's callback.
return getSecretsAsync(vaultBaseUrl, serviceCallback);
public class Invocation { /** * this method can be called concurrently */ void notifyBackupComplete ( ) { } }
// May be invoked concurrently by multiple backup-ack handlers; the
// counter is advanced via an atomic field updater so each caller sees a
// unique completed count.
int newBackupAcksCompleted = BACKUP_ACKS_RECEIVED.incrementAndGet(this);

Object pendingResponse = this.pendingResponse;
if (pendingResponse == VOID) {
    // no pendingResponse has been set, so we are done since the invocation on the primary needs to complete first
    return;
}

// if a pendingResponse is set, then the backupsAcksExpected has been set (so we can now safely read backupsAcksExpected)
int backupAcksExpected = this.backupsAcksExpected;
if (backupAcksExpected < newBackupAcksCompleted) {
    // the backups have not yet completed, so we are done
    return;
}

if (backupAcksExpected != newBackupAcksCompleted) {
    // we managed to complete one backup, but we were not the one completing the last backup, so we are done
    return;
}

// we are the lucky one since we just managed to complete the last backup for this invocation and since the
// pendingResponse is set, we can set it on the future
complete(pendingResponse);
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcSpecificHeatCapacityMeasure ( ) { } }
// Generated EMF accessor: lazily resolve the EClass from the globally
// registered Ifc4 package by its fixed classifier index (869), then
// cache it for subsequent calls.
if (ifcSpecificHeatCapacityMeasureEClass == null) {
    ifcSpecificHeatCapacityMeasureEClass = (EClass) EPackage.Registry.INSTANCE
        .getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(869);
}
return ifcSpecificHeatCapacityMeasureEClass;
public class CPDefinitionInventoryPersistenceImpl { /** * Removes all the cp definition inventories where uuid = & # 63 ; from the database . * @ param uuid the uuid */ @ Override public void removeByUuid ( String uuid ) { } }
// Fetch all cp definition inventories matching the uuid in one unpaged
// query (ALL_POS start/end) and delete them through the persistence
// remove path, one entity at a time.
for (CPDefinitionInventory inventory :
        findByUuid(uuid, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null)) {

    remove(inventory);
}
public class IntervalTree { /** * Ceiling T . * @ param T the T * @ return the T */ public T ceiling ( T T ) { } }
if ( T == null ) { return null ; } NodeIterator < T > iterator = new NodeIterator < > ( root , T . end ( ) , Integer . MAX_VALUE , true ) ; if ( iterator . hasNext ( ) ) { return iterator . next ( ) ; } return null ;
public class ResourceRegistryImpl { /** * Searches the registry for a resource identified by a JSON API resource * type . If a resource cannot be found , < i > null < / i > is returned . * @ param searchType * resource type * @ return registry entry or < i > null < / i > */ public RegistryEntry getEntry ( String searchType ) { } }
// Linear scan over all registered resource classes, comparing each
// class's JSON API type name against the requested type.
for (Map.Entry<Class, RegistryEntry> entry : resources.entrySet()) {
    String type = getResourceType(entry.getKey());
    // NOTE(review): a null type name aborts the entire search instead of
    // skipping this entry -- confirm this early-out is intentional.
    if (type == null) {
        return null;
    }
    if (type.equals(searchType)) {
        return entry.getValue();
    }
}
// No registered class maps to the requested type.
return null;
public class GlobalUniqueIndex { /** * JSON representation of committed state * @ return */ private JsonNode toJson ( ) { } }
// Serialize this index as { "name": ..., "properties": [ ... ] } where
// each property entry is its notify-JSON plus owning schema/label info.
final ObjectNode json = new ObjectNode(Topology.OBJECT_MAPPER.getNodeFactory());
final ArrayNode propertiesJson = new ArrayNode(Topology.OBJECT_MAPPER.getNodeFactory());
for (final PropertyColumn propertyColumn : this.properties) {
    final ObjectNode propertyJson = propertyColumn.toNotifyJson();
    propertyJson.put("schemaName", propertyColumn.getParentLabel().getSchema().getName());
    propertyJson.put("abstractLabelLabel", propertyColumn.getParentLabel().getLabel());
    propertiesJson.add(propertyJson);
}
json.put("name", getName());
json.set("properties", propertiesJson);
return json;
public class HtmlDocletWriter { /** * Get the deprecated phrase as content . * @ param e the Element for which the inline deprecated comment will be added * @ return a content tree for the deprecated phrase . */ public Content getDeprecatedPhrase ( Element e ) { } }
// Elements deprecated for removal get the stronger phrase; plain
// deprecation gets the standard one.
if (utils.isDeprecatedForRemoval(e)) {
    return contents.deprecatedForRemovalPhrase;
}
return contents.deprecatedPhrase;
public class ThreadPoolNotifier { /** * Enqueues a notification to run . If the notification fails , it will be retried every two * minutes until 5 attempts are completed . Notifications to the same callback should be * delivered successfully in order . * @ param not */ @ Override protected void enqueueNotification ( final Notification not ) { } }
final Runnable r = new Runnable() {
    @Override
    public void run() {
        // Record the time of this delivery attempt.
        not.lastRun = System.currentTimeMillis();
        final SubscriptionSummary summary = postNotification(not.subscriber, not.mimeType, not.payload);
        if (!summary.isLastPublishSuccessful()) {
            // Failed delivery: count the attempt and re-enqueue, up to 5 retries.
            not.retryCount++;
            if (not.retryCount <= 5) {
                retry(not);
            }
        }
        // NOTE(review): the summary callback fires on every attempt,
        // including failed ones that will be retried -- confirm intended.
        not.callback.onSummaryInfo(summary);
    }
};
// NOTE(review): field name 'exeuctor' is misspelled; declared elsewhere,
// so it cannot be renamed here without touching its declaration.
exeuctor.execute(r);
public class DefaultExceptionContext { /** * { @ inheritDoc } */ @ Override public DefaultExceptionContext setContextValue ( final String label , final Object value ) { } }
// Remove every pair already stored under this label, then append the
// new value, so the label ends up with exactly one entry.
contextValues.removeIf(pair -> StringUtils.equals(label, pair.getKey()));
addContextValue(label, value);
return this;
public class AT_Row { /** * Sets the right padding character for all cells in the row . * @ param paddingRightChar new padding character , ignored if null * @ return this to allow chaining */ public AT_Row setPaddingRightChar ( Character paddingRightChar ) { } }
// A row without cells (e.g. a rule row) has nothing to pad.
if (!this.hasCells()) {
    return this;
}
// Propagate the padding character to every cell's render context.
for (final AT_Cell cell : this.getCells()) {
    cell.getContext().setPaddingRightChar(paddingRightChar);
}
return this;
public class ReflectionUtils { /** * Rethrow the given { @ link Throwable exception } , which is presumably the * < em > target exception < / em > of an { @ link InvocationTargetException } . Should * only be called if no checked exception is expected to be thrown by the * target method . * < p > Rethrows the underlying exception cast to an { @ link Exception } or * { @ link Error } if appropriate ; otherwise , throws an * { @ link IllegalStateException } . * @ param ex the exception to rethrow * @ throws Exception the rethrown exception ( in case of a checked exception ) */ public static void rethrowException ( Throwable ex ) throws Exception { } }
if ( ex instanceof Exception ) { throw ( Exception ) ex ; } if ( ex instanceof Error ) { throw ( Error ) ex ; } throw new UndeclaredThrowableException ( ex ) ;
public class IntCounter { /** * Returns the total count for all objects in this Counter that pass the * given Filter . Passing in a filter that always returns true is equivalent * to calling { @ link # totalCount ( ) } . */ public int totalIntCount ( Filter < E > filter ) { } }
// Sum the counts of exactly those keys the filter accepts.
int sum = 0;
for (E key : map.keySet()) {
    if (filter.accept(key)) {
        sum += getIntCount(key);
    }
}
return sum;
public class EnumIO { /** * Retrieves the enum key type from the EnumMap via reflection . This is used by { @ link ObjectSchema } . */ static Class < ? > getKeyTypeFromEnumMap ( Object enumMap ) { } }
// __keyTypeFromEnumMap is presumably the reflectively-obtained
// java.util.EnumMap 'keyType' Field (see the error message); null means
// that reflective lookup failed at class-init time.
if (__keyTypeFromEnumMap == null) {
    throw new RuntimeException("Could not access (reflection) the private "
        + "field *keyType* (enumClass) from: class java.util.EnumMap");
}
try {
    // Read the private field from the given EnumMap instance.
    return (Class<?>) __keyTypeFromEnumMap.get(enumMap);
} catch (Exception e) {
    // IllegalAccessException etc. from the reflective read.
    throw new RuntimeException(e);
}
public class LoggingHandlerInterceptor { /** * Builds raw request message content from Http servlet request . * @ param request * @ return * @ throws IOException */ private String getRequestContent ( HttpServletRequest request ) throws IOException { } }
// Reconstruct the raw HTTP request: request line, headers (values of a
// repeated header joined by commas), a blank line, then the body.
final StringBuilder out = new StringBuilder();
out.append(request.getProtocol())
   .append(" ")
   .append(request.getMethod())
   .append(" ")
   .append(request.getRequestURI())
   .append(NEWLINE);

final Enumeration<?> headerNames = request.getHeaderNames();
while (headerNames.hasMoreElements()) {
    final String headerName = headerNames.nextElement().toString();
    out.append(headerName).append(":");
    final Enumeration<?> headerValues = request.getHeaders(headerName);
    boolean first = true;
    while (headerValues.hasMoreElements()) {
        if (!first) {
            out.append(",");
        }
        out.append(headerValues.nextElement());
        first = false;
    }
    out.append(NEWLINE);
}

// Blank separator line, then the raw message body.
out.append(NEWLINE);
out.append(FileUtils.readToString(request.getInputStream()));
return out.toString();
public class DeviceProxyFactory { public static boolean exists ( String deviceName ) throws DevFailed { } }
// Get full device name ( with tango host ) to manage multi tango _ host String fullDeviceName = new TangoUrl ( deviceName ) . toString ( ) ; // Get it if already exists DeviceProxy dev = proxy_table . get ( fullDeviceName ) ; return ( dev != null ) ;
public class DateLabels { /** * Creates a label which displays date only ( year , month , day ) . * @ param id component id * @ param model model returning date to display * @ return date label */ public static DateLabel forDate ( String id , IModel < Date > model ) { } }
// Delegate to DateLabel with the shared year/month/day pattern constant.
return DateLabel.forDatePattern(id, model, DATE_PATTERN);
public class BSUImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public void eSet ( int featureID , Object newValue ) { } }
// Generated EMF reflective setter: only the LID feature is handled here;
// every other feature id is delegated to the superclass.
switch (featureID) {
    case AfplibPackage.BSU__LID:
        setLID((Integer) newValue);
        return;
}
super.eSet(featureID, newValue);
public class Tree { /** * Much of the code below is taken from the HtmlBaseTag . We need to eliminate this duplication * through some type of helper methods . */ private String renderDefaultJavaScript ( HttpServletRequest request , String realId ) { } }
String idScript = null ; // map the tagId to the real id if ( TagConfig . isDefaultJavaScript ( ) ) { ScriptRequestState srs = ScriptRequestState . getScriptRequestState ( request ) ; idScript = srs . mapTagId ( getScriptReporter ( ) , _trs . tagId , realId , null ) ; } return idScript ;
public class Stream { /** * If specified array is null , returns an empty { @ code Stream } , * otherwise returns a { @ code Stream } containing elements of this array . * @ param < T > the type of the stream elements * @ param array the array whose elements to be passed to stream * @ return the new stream * @ since 1.1.9 */ @ NotNull public static < T > Stream < T > ofNullable ( @ Nullable final T [ ] array ) { } }
// A null array yields an empty stream; otherwise stream its elements.
if (array == null) {
    return Stream.<T>empty();
}
return Stream.of(array);
public class ZKPaths { /** * Make sure all the nodes in the path are created . NOTE : Unlike File . mkdirs ( ) , Zookeeper doesn ' t distinguish * between directories and files . So , every node in the path is created . The data for each node is an empty blob * @ param zookeeper the client * @ param path path to ensure * @ param makeLastNode if true , all nodes are created . If false , only the parent nodes are created * @ param aclProvider if not null , the ACL provider to use when creating parent nodes * @ param asContainers if true , nodes are created as CreateMode # CONTAINER * @ throws InterruptedException thread interruption * @ throws org . apache . zookeeper . KeeperException Zookeeper errors */ public static void mkdirs ( ZooKeeper zookeeper , String path , boolean makeLastNode , InternalACLProvider aclProvider , boolean asContainers ) throws InterruptedException , KeeperException { } }
// Validate, then walk the path one separator at a time, creating each
// missing intermediate node (and optionally the last one).
PathUtils.validatePath(path);

int pos = 1; // skip first slash, root is guaranteed to exist
do {
    // Advance to the next separator; -1 means we've reached the last node.
    pos = path.indexOf(PATH_SEPARATOR, pos + 1);

    if (pos == -1) {
        if (makeLastNode) {
            pos = path.length();
        } else {
            break;
        }
    }

    String subPath = path.substring(0, pos);
    if (zookeeper.exists(subPath, false) == null) {
        try {
            // Resolve the ACL: provider's per-path ACL, then its default,
            // then the open/unsafe fallback.
            List<ACL> acl = null;
            if (aclProvider != null) {
                acl = aclProvider.getAclForPath(subPath);
                if (acl == null) {
                    acl = aclProvider.getDefaultAcl();
                }
            }
            if (acl == null) {
                acl = ZooDefs.Ids.OPEN_ACL_UNSAFE;
            }
            zookeeper.create(subPath, new byte[0], acl, getCreateMode(asContainers));
        } catch (KeeperException.NodeExistsException e) {
            // ignore... someone else has created it since we checked
        }
    }
} while (pos < path.length());
public class CampaignCriterionServiceLocator { /** * For the given interface , get the stub implementation . * If this service has no port for the given interface , * then ServiceException is thrown . */ public java . rmi . Remote getPort ( Class serviceEndpointInterface ) throws javax . xml . rpc . ServiceException { } }
// Generated Axis service-locator boilerplate: build the SOAP binding
// stub when the requested interface matches, otherwise fail.
try {
    if (com.google.api.ads.adwords.axis.v201809.cm.CampaignCriterionServiceInterface.class.isAssignableFrom(serviceEndpointInterface)) {
        com.google.api.ads.adwords.axis.v201809.cm.CampaignCriterionServiceSoapBindingStub _stub =
            new com.google.api.ads.adwords.axis.v201809.cm.CampaignCriterionServiceSoapBindingStub(
                new java.net.URL(CampaignCriterionServiceInterfacePort_address), this);
        _stub.setPortName(getCampaignCriterionServiceInterfacePortWSDDServiceName());
        return _stub;
    }
} catch (java.lang.Throwable t) {
    // Wrap any stub-construction failure (e.g. malformed URL), keeping the cause.
    throw new javax.xml.rpc.ServiceException(t);
}
// No port exists for the requested interface.
throw new javax.xml.rpc.ServiceException("There is no stub implementation for the interface: "
    + (serviceEndpointInterface == null ? "null" : serviceEndpointInterface.getName()));
public class Period { /** * Returns a new period with the specified number of minutes . * This period instance is immutable and unaffected by this method call . * @ param minutes the amount of minutes to add , may be negative * @ return the new period with the increased minutes * @ throws UnsupportedOperationException if the field is not supported */ public Period withMinutes ( int minutes ) { } }
// Work on a copy of the packed field values; Period itself is immutable.
int[] values = getValues(); // cloned
// Set the minutes slot via the period type, which throws
// UnsupportedOperationException when minutes are not supported.
// NOTE(review): this SETS the field; the javadoc's "amount of minutes to
// add" wording looks borrowed from plusMinutes -- confirm.
getPeriodType().setIndexedField(this, PeriodType.MINUTE_INDEX, values, minutes);
return new Period(values, getPeriodType());
public class DefaultSegmentedDataContainer { /** * Priority has to be higher than the clear priority - which is currently 999 */ @ Stop ( priority = 9999 ) public void stop ( ) { } }
for ( int i = 0 ; i < maps . length ( ) ; ++ i ) { stopMap ( i , false ) ; }
public class JsonUtil { /** * Writes a resources JSON view to a writer using the default application rules for filtering . * @ param writer the writer for the JSON transformation * @ param resource the resource to transform * @ throws RepositoryException * @ throws IOException */ public static void exportJson ( JsonWriter writer , Resource resource ) throws RepositoryException , IOException { } }
// Delegate using the application's default mapping rules for filtering.
exportJson(writer, resource, MappingRules.getDefaultMappingRules());
public class CmsUgcSession { /** * Checks that the session is not finished , and throws an exception otherwise . < p > * @ throws CmsUgcException if the session is finished */ private void checkNotFinished ( ) throws CmsUgcException { } }
// Still active: nothing to check.
if (!m_finished) {
    return;
}
// Localize the error text for the current request's locale before failing.
String localizedMessage = Messages.get().container(
    Messages.ERR_FORM_SESSION_ALREADY_FINISHED_0).key(
    getCmsObject().getRequestContext().getLocale());
throw new CmsUgcException(CmsUgcConstants.ErrorCode.errInvalidAction, localizedMessage);
public class FileOperations { /** * Downloads the specified file from the specified task ' s directory on its compute node . * @ param jobId The ID of the job containing the task . * @ param taskId The ID of the task . * @ param fileName The name of the file to download . * @ param outputStream A stream into which the file contents will be written . * @ throws BatchErrorException Exception thrown when an error response is received from the Batch service . * @ throws IOException Exception thrown when there is an error in serialization / deserialization of data sent to / received from the Batch service . */ public void getFileFromTask ( String jobId , String taskId , String fileName , OutputStream outputStream ) throws BatchErrorException , IOException { } }
// Delegate to the full overload with default (null) additional options.
getFileFromTask(jobId, taskId, fileName, null, outputStream);
public class AnnotationUtils { /** * Get all { @ link Annotation Annotations } from the supplied { @ link Method } . * < p > Correctly handles bridge { @ link Method Methods } generated by the compiler . * @ param method the Method to retrieve annotations from * @ return the annotations found */ public static Annotation [ ] getAnnotations ( Method method ) { } }
try {
    // Resolve the bridged method first so annotations declared on the
    // original (non-bridge) method are returned.
    return BridgeMethodResolver.findBridgedMethod(method).getAnnotations();
} catch (Exception ex) {
    // Assuming nested Class values not resolvable within annotation attributes...
    logIntrospectionFailure(method, ex);
    // NOTE(review): returns null on failure even though the javadoc says
    // "the annotations found" -- callers must null-check.
    return null;
}
public class Parser { /** * Copies all content from { @ code from } to { @ code to } . */ @ Private static StringBuilder read ( Readable from ) throws IOException { } }
// Accumulate everything the Readable produces into a StringBuilder
// using a fixed 2 KiB transfer buffer.
StringBuilder builder = new StringBuilder();
CharBuffer buf = CharBuffer.allocate(2048);
for (;;) {
    int r = from.read(buf);
    if (r == -1) {
        break; // end of input
    }
    buf.flip();
    builder.append(buf, 0, r);
    // Fix: reset position AND limit for the next read. Without clear(),
    // the limit stays at the previous read's size, capping every later
    // read, and a 0-char read would make the buffer permanently full-at-
    // limit-0 and spin forever.
    buf.clear();
}
return builder;
public class WrappedObjectMapperProcessor { /** * Find the first compatible setter ( conversion from String is supported ) and invoke it . * @ param target * the object to change and setter name . * @ param value * the value . * @ throws IllegalAccessException * when there is a problem . * @ throws InvocationTargetException * when there is a problem . * @ throws NoSuchMethodError * when there is no compatible setter . */ private void findAndInvokeCompatibleSetter ( SetterTarget target , String value ) throws IllegalAccessException , InvocationTargetException , NoSuchMethodError { } }
Object _objectToChange = target.getObjectToChange();
// NOTE(review): getDeclaredMethods() does not include inherited setters
// -- confirm that is intended.
Method[] _declaredMethods = _objectToChange.getClass().getDeclaredMethods();
String _setterName = target.getSetterName();
for (Method _candidate : _declaredMethods) {
    if (_candidate.getName().equals(_setterName)) {
        Class<?>[] _parameterTypes = _candidate.getParameterTypes();
        if (_parameterTypes.length != 1) {
            // Setters take exactly one argument; skip other overloads.
            continue;
        }
        Class<?> _class = _parameterTypes[0];
        if (_class == String.class) {
            // no conversion
            _candidate.invoke(_objectToChange, value);
            return;
        } else if (_class == Integer.TYPE) {
            _candidate.invoke(_objectToChange, Integer.parseInt(value));
            return;
        } else if (_class == Long.TYPE) {
            _candidate.invoke(_objectToChange, Long.parseLong(value));
            return;
        } else if (_class == Float.TYPE) {
            _candidate.invoke(_objectToChange, Float.parseFloat(value));
            return;
        } else if (_class == Double.TYPE) {
            _candidate.invoke(_objectToChange, Double.parseDouble(value));
            return;
        } else if (_class == Boolean.TYPE) {
            _candidate.invoke(_objectToChange, Boolean.parseBoolean(value));
            return;
        }
        // NOTE(review): boxed types (Integer.class etc.) and any other
        // parameter type fall through to the NoSuchMethodError below.
    }
}
// No compatible single-argument setter was found.
throw new NoSuchMethodError(_setterName);
public class ScriptBlock { /** * Place the JavaScript inside in relationship to the frameword generated JavaScript . * @ param placement The placement of the JavaScript * @ jsptagref . attributedescription String value ' after ' or ' before ' . Places the JavaScript * before or after the JavaScript provided by the framework . * @ jsptagref . databindable false * @ jsptagref . attributesyntaxvalue < i > string _ or _ expression _ output < / i > * @ netui : attribute rtexprvalue = " true " * description = " The String literal or expression used to output the content . " */ public void setPlacement ( String placement ) { } }
// "after"/"before" position the script relative to the framework's
// generated JavaScript; any other value renders it inline in place.
// (placement.equals(...) deliberately kept caller-first, matching the
// original's behavior for a null attribute value.)
if (placement.equals("after")) {
    _placement = ScriptPlacement.PLACE_AFTER;
} else if (placement.equals("before")) {
    _placement = ScriptPlacement.PLACE_BEFORE;
} else {
    _placement = ScriptPlacement.PLACE_INLINE;
}
public class GearWearableUtility { /** * Check if a position is within a circle * @ param x x position * @ param y y position * @ param centerX center x of circle * @ param centerY center y of circle * @ return true if within circle , false otherwise */ static boolean isInCircle ( float x , float y , float centerX , float centerY , float radius ) { } }
return Math . abs ( x - centerX ) < radius && Math . abs ( y - centerY ) < radius ;
public class TextBoxView { /** * Gets the text . * @ param position * the position , where to begin * @ param len * the length of text portion * @ return the text */ protected String getTextEx ( int position , int len ) { } }
try {
    return getDocument().getText(position, len);
} catch (BadLocationException e) {
    // Out-of-range position/length: log the stack trace and degrade to
    // an empty string rather than propagating the checked exception.
    e.printStackTrace();
    return "";
}
public class UtilFeature { /** * Creates a FastQueue and declares new instances of the descriptor using the provided * { @ link DetectDescribePoint } . The queue will have declareInstance set to true , otherwise * why would you be using this function ? */ public static < TD extends TupleDesc > FastQueue < TD > createQueue ( final DescriptorInfo < TD > detDesc , int initialMax ) { } }
// Anonymous FastQueue whose createInstance delegates to the descriptor
// factory; constructed with declareInstances == true (third argument),
// so grow operations allocate real descriptors.
return new FastQueue<TD>(initialMax, detDesc.getDescriptionType(), true) {
    @Override
    protected TD createInstance() {
        return detDesc.createDescription();
    }
};
public class BinaryProtocol { /** * Close the connection and shutdown the handler thread . * @ throws IOException * @ throws InterruptedException */ public void close ( ) throws IOException , InterruptedException { } }
LOG.debug("closing connection");
// Close the data stream, then the uplink's connection.
stream.close();
uplink.closeConnection();
// Interrupt the uplink handler thread and wait for it to terminate.
uplink.interrupt();
uplink.join();
public class UserPreferences { /** * Sets the key prefix . * @ param keyPrefix the new key prefix */ public static void setKeyPrefix ( final String keyPrefix ) { } }
UserPreferences.keyPrefix = keyPrefix;
try {
    // NOTE(review): sync() flushes the preferences backing store; the
    // static keyPrefix field itself is process-local, so confirm the
    // sync is actually needed here.
    systemRoot.sync();
} catch (final Exception e) {
    // Delegate failure handling to the shared exception utility.
    JKExceptionUtil.handle(e);
}
public class InternalPureXbaseParser { /** * InternalPureXbase . g : 1847:1 : entryRuleOpAdd returns [ String current = null ] : iv _ ruleOpAdd = ruleOpAdd EOF ; */ public final String entryRuleOpAdd ( ) throws RecognitionException { } }
// Generated ANTLR entry rule: parse OpAdd followed by EOF and return the
// matched text; returns null after an unrecoverable recognition error.
String current = null;

AntlrDatatypeRuleToken iv_ruleOpAdd = null;

try {
    // InternalPureXbase.g:1847:45: (iv_ruleOpAdd= ruleOpAdd EOF )
    // InternalPureXbase.g:1848:2: iv_ruleOpAdd= ruleOpAdd EOF
    {
        if (state.backtracking == 0) {
            newCompositeNode(grammarAccess.getOpAddRule());
        }
        pushFollow(FOLLOW_1);
        iv_ruleOpAdd = ruleOpAdd();

        state._fsp--;
        if (state.failed) return current;
        if (state.backtracking == 0) {
            current = iv_ruleOpAdd.getText();
        }
        match(input, EOF, FOLLOW_2);
        if (state.failed) return current;
    }
} catch (RecognitionException re) {
    // Standard generated recovery: resync and record skipped tokens.
    recover(input, re);
    appendSkippedTokens();
} finally {
}
return current;
public class JCGLProjectionMatrices { /** * < p > Calculate a matrix that will produce a perspective projection based on * the given view frustum parameters , the aspect ratio of the viewport and a * given horizontal field of view in radians . Note that { @ code fov _ radians } * represents the full horizontal field of view : the angle at the base of the * triangle formed by the frustum on the { @ code x / z } plane . < / p > * < p > Note that iff { @ code z _ far & gt ; = Double . POSITIVE _ INFINITY } , the * function produces an " infinite projection matrix " , suitable for use in code * that deals with shadow volumes . < / p > * < p > The function assumes a right - handed coordinate system . < / p > * < p > See * < a href = " http : / / http . developer . nvidia . com / GPUGems / gpugems _ ch09 . html " > GPU * Gems < / a > < / p > * @ param z _ near The near clipping plane coordinate * @ param z _ far The far clipping plane coordinate * @ param aspect The aspect ratio of the viewport ; the width divided * by the height . For example , an aspect ratio of 2.0 * indicates a viewport twice as wide as it is high * @ param horizontal _ fov The horizontal field of view in radians * @ param < A > A phantom type parameter , possibly representing a * source coordinate system * @ param < B > A phantom type parameter , possibly representing a * target coordinate system * @ return A perspective projection matrix */ public static < A , B > PMatrix4x4D < A , B > perspectiveProjectionRHP ( final double z_near , final double z_far , final double aspect , final double horizontal_fov ) { } }
// Horizontal half-extent of the near plane from the full horizontal FOV:
// tan(fov/2) gives the half-angle's opposite/adjacent ratio at z_near.
final double x_max = z_near * StrictMath.tan(horizontal_fov / 2.0);
final double x_min = -x_max;
// Vertical extent follows from the aspect ratio (width / height).
final double y_max = x_max / aspect;
final double y_min = -y_max;
// Delegate to the general frustum constructor (handles infinite z_far).
return frustumProjectionRHP(x_min, x_max, y_min, y_max, z_near, z_far);
public class FlowTriggerScheduler { /** * Retrieve the list of scheduled flow triggers from quartz database */ public List < ScheduledFlowTrigger > getScheduledFlowTriggerJobs ( ) { } }
try {
    final Scheduler quartzScheduler = this.scheduler.getScheduler();
    final List<String> groupNames = quartzScheduler.getJobGroupNames();
    final List<ScheduledFlowTrigger> flowTriggerJobDetails = new ArrayList<>();
    // One flow-trigger job per quartz group; rebuild each job's details
    // from its stored JobDataMap.
    for (final String groupName : groupNames) {
        final JobKey jobKey = new JobKey(FlowTriggerQuartzJob.JOB_NAME, groupName);
        ScheduledFlowTrigger scheduledFlowTrigger = null;
        try {
            final JobDetail job = quartzScheduler.getJobDetail(jobKey);
            final JobDataMap jobDataMap = job.getJobDataMap();
            final String flowId = jobDataMap.getString(FlowTriggerQuartzJob.FLOW_ID);
            final int projectId = jobDataMap.getInt(FlowTriggerQuartzJob.PROJECT_ID);
            final FlowTrigger flowTrigger = (FlowTrigger) jobDataMap.get(FlowTriggerQuartzJob.FLOW_TRIGGER);
            final String submitUser = jobDataMap.getString(FlowTriggerQuartzJob.SUBMIT_USER);
            final List<? extends Trigger> quartzTriggers = quartzScheduler.getTriggersOfJob(jobKey);
            final boolean isPaused = this.scheduler.isJobPaused(FlowTriggerQuartzJob.JOB_NAME, groupName);
            final Project project = projectManager.getProject(projectId);
            final Flow flow = project.getFlow(flowId);
            scheduledFlowTrigger = new ScheduledFlowTrigger(projectId,
                this.projectManager.getProject(projectId).getName(), flowId, flowTrigger,
                submitUser, quartzTriggers.isEmpty() ? null : quartzTriggers.get(0),
                isPaused, flow.isLocked());
        } catch (final Exception ex) {
            // A single broken job must not abort the whole listing.
            logger.error("Unable to get flow trigger by job key {}", jobKey, ex);
            scheduledFlowTrigger = null;
        }
        // NOTE(review): a failed lookup adds a null element to the result
        // list -- confirm callers tolerate nulls.
        flowTriggerJobDetails.add(scheduledFlowTrigger);
    }
    return flowTriggerJobDetails;
} catch (final Exception ex) {
    // Listing failure degrades to an empty result.
    logger.error("Unable to get scheduled flow triggers", ex);
    return new ArrayList<>();
}
public class ProcfsBasedProcessTree { /** * Get the list of all processes in the system . */ private List < Integer > getProcessList ( ) { } }
// File.list() returns null when the procfs directory does not exist or
// cannot be read; treat that as "no processes" instead of letting the
// for-loop throw a NullPointerException.
String[] processDirs = (new File(procfsDir)).list();
List<Integer> processList = new ArrayList<Integer>();
if (processDirs == null) {
    return processList;
}
for (String dir : processDirs) {
    try {
        int pd = Integer.parseInt(dir);
        if ((new File(procfsDir, dir)).isDirectory()) {
            // Numeric directory names under procfs are process ids.
            processList.add(Integer.valueOf(pd));
        }
    } catch (NumberFormatException n) {
        // skip this directory (name is not a pid)
    } catch (SecurityException s) {
        // skip this process (not permitted to stat it)
    }
}
return processList;
public class Gauge { /** * Defines the behavior of the visualization where the needle / bar should * always return to 0 after it reached the final value . This setting only makes * sense if animated = = true and the data rate is not too high . * Set to false when using real measured live data . * @ param IS _ TRUE */ public void setReturnToZero ( final boolean IS_TRUE ) { } }
if ( null == returnToZero ) { _returnToZero = Double . compare ( getMinValue ( ) , 0.0 ) <= 0 ? IS_TRUE : false ; fireUpdateEvent ( REDRAW_EVENT ) ; } else { returnToZero . set ( IS_TRUE ) ; }
public class StreamMarshaller {
/**
 * Marshall the given parameter object.
 *
 * @param stream the model object to marshall; must not be null
 * @param protocolMarshaller the marshaller that receives each field/binding pair
 */
public void marshall ( Stream stream , ProtocolMarshaller protocolMarshaller ) { } }
// Reject null input up front, per the AWS SDK marshaller convention.
if (stream == null) {
    throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
    // emit each model field through its protocol binding
    protocolMarshaller.marshall(stream.getStreamId(), STREAMID_BINDING);
    protocolMarshaller.marshall(stream.getFileId(), FILEID_BINDING);
} catch (Exception e) {
    // wrap any low-level failure in the SDK's client exception type
    throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
public class GenericsResolutionUtils {
/**
 * Analyze interface generics. If type is contained in known types - no generics
 * resolution performed (trust provided info).
 *
 * @param types resolved generics of already analyzed types (mutated: the
 *              interface's resolved generics are recorded here)
 * @param knownTypes type generics known before analysis (some middle class generics are known)
 * @param iface interface to analyze (either a raw Class or a ParameterizedType)
 * @param hostType class implementing interface (where generics actually defined)
 * @param ignoreClasses classes to ignore during analysis
 */
private static void analyzeInterface ( final Map < Class < ? > , LinkedHashMap < String , Type > > types , final Map < Class < ? > , LinkedHashMap < String , Type > > knownTypes , final Type iface , final Class < ? > hostType , final List < Class < ? > > ignoreClasses ) { } }
// Unwrap the raw interface class from a possible parameterization.
final Class interfaceType = iface instanceof ParameterizedType
        ? (Class) ((ParameterizedType) iface).getRawType()
        : (Class) iface;
if (!ignoreClasses.contains(interfaceType)) {
    if (knownTypes.containsKey(interfaceType)) {
        // check possibly already resolved generics (if provided externally)
        types.put(interfaceType, knownTypes.get(interfaceType));
    } else if (iface instanceof ParameterizedType) {
        // resolve the declared generics against the host type's generics
        final ParameterizedType parametrization = (ParameterizedType) iface;
        final LinkedHashMap<String, Type> generics =
                resolveGenerics(parametrization, types.get(hostType));
        if (types.containsKey(interfaceType)) {
            // class hierarchy may contain multiple implementations for the same interface
            // in this case we can merge known generics, using most specific types
            // (root type unifies interfaces, so we just collecting actual maximum known info
            // from multiple sources)
            merge(interfaceType, generics, types.get(interfaceType));
        }
        types.put(interfaceType, generics);
    } else if (interfaceType.getTypeParameters().length > 0) {
        // root class didn't declare generics
        types.put(interfaceType, resolveRawGenerics(interfaceType));
    } else if (!GROOVY_OBJECT.equals(interfaceType.getSimpleName())) {
        // avoid groovy specific interface (all groovy objects implements it)
        types.put(interfaceType, EmptyGenericsMap.getInstance());
    }
    // recurse into the interface's own hierarchy
    analyzeType(types, interfaceType, knownTypes, ignoreClasses);
}
public class AbstractTransitionBuilder {
/**
 * Transits an int property from the start value to the end value.
 * (NOTE(review): the original javadoc said "float property" — this is the
 * int overload; see PropertyValuesHolder.ofInt.)
 *
 * @param propertyId id of the property to animate
 * @param vals the int key values the property transitions through
 * @return self, for call chaining
 */
public T transitInt ( int propertyId , int ... vals ) { } }
// Resolve the property's name once, then register both the real
// animation holder and its shadow counterpart under the same id.
String property = getPropertyName(propertyId);
mHolders.put(propertyId, PropertyValuesHolder.ofInt(property, vals));
mShadowHolders.put(propertyId, ShadowValuesHolder.ofInt(property, vals));
return self();
public class HourRange { /** * Converts a given string into an instance of this class . * @ param str * String to convert . * @ return New instance . */ @ Nullable public static HourRange valueOf ( @ Nullable final String str ) { } }
if ( str == null ) { return null ; } return new HourRange ( str ) ;
public class WhileyFileParser {
/**
 * A headless statement is one which has no identifying keyword. The set of
 * headless statements include assignments, invocations, variable
 * declarations and named blocks. Disambiguation is done by speculative
 * parsing with backtracking (resetting {@code index} to {@code start}).
 *
 * @param scope The enclosing scope for this statement, which determines the
 *              set of visible (i.e. declared) variables and also the current
 *              indentation level.
 * @return the parsed statement (or expression, for invocations)
 */
private Stmt parseHeadlessStatement ( EnclosingScope scope ) { } }
// Remember the token position so each failed speculation can rewind.
int start = index;
// See if it is a named block
Identifier blockName = parseOptionalIdentifier(scope);
if (blockName != null) {
    if (tryAndMatch(true, Colon) != null && isAtEOL()) {
        int end = index;
        matchEndLine();
        // named block introduces a fresh scope with its own lifetime
        scope = scope.newEnclosingScope();
        scope.declareLifetime(blockName);
        Stmt.Block body = parseBlock(scope, false);
        return annotateSourceLocation(new Stmt.NamedBlock(blockName, body), start);
    } else {
        index = start; // backtrack
    }
}
// assignment : Identifier | LeftBrace | Star
// variable decl : Identifier | LeftBrace | LeftCurly | Ampersand
// invoke : Identifier | LeftBrace | Star
if (tryAndMatch(false, Final) != null || (skipType(scope) && tryAndMatch(false, Identifier) != null)) {
    // Must be a variable declaration as this is the only situation in which a type
    // can be followed by an identifier.
    index = start; // backtrack
    return parseVariableDeclaration(scope);
}
// Must be an assignment or invocation
index = start; // backtrack
Expr e = parseExpression(scope, false);
if (e instanceof Expr.Invoke || e instanceof Expr.IndirectInvoke) {
    // Must be an invocation since these are neither valid
    // lvals (i.e. they cannot be assigned) nor types.
    matchEndLine();
    return e;
} else {
    // At this point, the only remaining option is an assignment statement.
    // Therefore, it must be that.
    index = start; // backtrack
    return parseAssignmentStatement(scope);
}
public class ChangeTagsForResourceRequest { /** * A complex type that contains a list of the tags that you want to add to the specified health check or hosted zone * and / or the tags that you want to edit < code > Value < / code > for . * You can add a maximum of 10 tags to a health check or a hosted zone . * @ param addTags * A complex type that contains a list of the tags that you want to add to the specified health check or * hosted zone and / or the tags that you want to edit < code > Value < / code > for . < / p > * You can add a maximum of 10 tags to a health check or a hosted zone . */ public void setAddTags ( java . util . Collection < Tag > addTags ) { } }
if ( addTags == null ) { this . addTags = null ; return ; } this . addTags = new com . amazonaws . internal . SdkInternalList < Tag > ( addTags ) ;
public class Stamps { /** * Returns true if field corresponding to fieldID is set ( has been assigned a value ) and false * otherwise */ public boolean isSet ( _Fields field ) { } }
if ( field == null ) { throw new java . lang . IllegalArgumentException ( ) ; } switch ( field ) { case TX_STAMPS_START : return isSetTxStampsStart ( ) ; case GC_STAMP : return isSetGcStamp ( ) ; } throw new java . lang . IllegalStateException ( ) ;
public class CmsSessionManager { /** * Returns the broadcast queue for the given OpenCms session id . < p > * @ param sessionId the OpenCms session id to get the broadcast queue for * @ return the broadcast queue for the given OpenCms session id */ public Buffer getBroadcastQueue ( String sessionId ) { } }
CmsSessionInfo sessionInfo = getSessionInfo ( getSessionUUID ( sessionId ) ) ; if ( sessionInfo == null ) { // return empty message buffer if the session is gone or not available return BufferUtils . synchronizedBuffer ( new CircularFifoBuffer ( CmsSessionInfo . QUEUE_SIZE ) ) ; } return sessionInfo . getBroadcastQueue ( ) ;
public class XmlBean {
/**
 * Internal method for handling the configuration of an object. This method
 * is recursively called for simple and complex properties.
 *
 * @param rootNode the XML node whose children are mapped onto bean properties
 * @param obj the target object being configured
 * @param properties map of node paths already set (used for duplicate detection)
 * @param checkForDuplicates whether setting the same property twice is an error
 * @param ch collection helper when {@code obj}'s current context is a
 *           Collection/Map value, otherwise null
 * @throws XmlBeanException on any lookup, permission, conversion or invocation failure
 */
private void doConfigure ( XmlParser . Node rootNode , Object obj , HashMap < String , String > properties , boolean checkForDuplicates , CollectionHelper ch ) throws XmlBeanException { } }
// loop thru all child nodes
if (rootNode.hasChildren()) {
    for (XmlParser.Node node : rootNode.getChildren()) {
        // tag name represents a property we're going to set
        String propertyName = node.getTag();
        // find the property if it exists
        BeanProperty property = null;
        if (ch != null) {
            // make sure node tag name matches propertyName
            if (!propertyName.equals(ch.getValueProperty().getName())) {
                throw new PropertyNotFoundException(propertyName, node.getPath(), obj.getClass(),
                    "Collection can only be configured with a property name of [" + ch.getValueProperty().getName()
                    + "] but [" + propertyName + "] was used instead");
            }
            property = ch.getValueProperty();
        } else {
            try {
                property = BeanUtil.findBeanProperty(obj.getClass(), propertyName, true);
            } catch (IllegalAccessException e) {
                throw new PropertyPermissionException(propertyName, node.getPath(), obj.getClass(),
                    "Illegal access while attempting to reflect property from class", e);
            }
        }
        // if property is null, then this isn't a valid property on this object
        if (property == null) {
            throw new PropertyNotFoundException(propertyName, node.getPath(), obj.getClass(),
                "Property [" + propertyName + "] not found");
        }
        // only some attributes are permitted if we're dealing with a
        // collection or map at this point
        boolean isCollection = (Collection.class.isAssignableFrom(property.getType()));
        boolean isMap = (Map.class.isAssignableFrom(property.getType()));
        // were any attributes included?
        String typeAttrString = null;
        String valueAttrString = null;
        String keyAttrString = null;
        // check if an annotation is present for the field
        if (property.getField() != null) {
            XmlBeanProperty annotation = property.getField().getAnnotation(XmlBeanProperty.class);
            if (annotation != null) {
                if (!StringUtil.isEmpty(annotation.value())) {
                    valueAttrString = annotation.value();
                }
                if (!StringUtil.isEmpty(annotation.key())) {
                    keyAttrString = annotation.key();
                }
            }
        }
        // process attributes within the xml itself
        if (node.hasAttributes()) {
            for (Attribute attr : node.getAttributes()) {
                if (attr.getName().equals("type")) {
                    typeAttrString = attr.getValue();
                } else if (attr.getName().equals("value") && (isCollection || isMap)) {
                    // only permitted on collections or map
                    valueAttrString = attr.getValue();
                } else if (attr.getName().equals("key") && (isMap || (ch != null && ch.isMapType()))) {
                    // only permitted on map type OR on a map value
                    keyAttrString = attr.getValue();
                } else {
                    throw new PropertyNoAttributesExpectedException(propertyName, node.getPath(), obj.getClass(),
                        "One or more attributes not allowed for property [" + propertyName + "]");
                }
            }
        }
        // otherwise, the property exists, attempt to set it
        // is there actually a "setter" method -- we shouldn't let
        // user's be able to configure fields in this case
        // unless accessing private properties is allowed
        // unless a collection helper is also null
        if (ch == null && !this.accessPrivateProperties && property.getAddMethod() == null
                && property.getSetMethod() == null) {
            throw new PropertyPermissionException(propertyName, node.getPath(), obj.getClass(),
                "Not permitted to add or set property [" + propertyName + "]");
        }
        // if we can "add" this property, then turn off checkForDuplicates
        // we also don't check for duplicates in the case of a collection
        if (ch != null || property.canAdd()) {
            checkForDuplicates = false;
        }
        // was this property already previously set?
        // only use this check if an "add" method doesn't exist for the bean
        if (checkForDuplicates && properties.containsKey(node.getPath())) {
            throw new PropertyAlreadySetException(propertyName, node.getPath(), obj.getClass(),
                "Property [" + propertyName + "] was already previously set in the xml");
        }
        // add this property to our hashmap
        properties.put(node.getPath(), null);
        // if a "type" attribute was included - check that it both exists
        // and is compatible with the type of the property it is being added/set to
        Class typeAttrClass = null;
        if (typeAttrString != null) {
            try {
                typeAttrClass = Class.forName(typeAttrString);
            } catch (ClassNotFoundException e) {
                throw new PropertyInvalidTypeException(propertyName, node.getPath(), obj.getClass(),
                    "Unable to find class [" + typeAttrString + "] specified in type attribute of property '"
                    + propertyName + "'");
            }
            if (!property.getType().isAssignableFrom(typeAttrClass)) {
                throw new PropertyInvalidTypeException(propertyName, node.getPath(), obj.getClass(),
                    "Unable to assign a value of specified type [" + typeAttrString + "] to property ["
                    + propertyName + "] which is a type [" + property.getType().getName() + "]");
            }
        }
        // the object we'll eventually add or set
        Object value = null;
        // get the node's text value
        String nodeText = node.getText();
        // is this a simple conversion?
        if (TypeConverterUtil.isSupported(property.getType())) {
            // was any text set? if not, throw an exception
            if (nodeText == null) {
                throw new PropertyIsEmptyException(propertyName, node.getPath(), obj.getClass(),
                    "Value for property [" + propertyName + "] was empty in xml");
            }
            // try to convert this to a Java object value
            try {
                value = TypeConverterUtil.convert(nodeText, property.getType());
            } catch (ConversionException e) {
                throw new PropertyConversionException(propertyName, node.getPath(), obj.getClass(),
                    "The value [" + nodeText + "] for property [" + propertyName
                    + "] could not be converted to a(n) " + property.getType().getSimpleName() + ". "
                    + e.getMessage());
            }
            // otherwise, this is a "complicated" type
        } else {
            // only "get" the property if its possible -- e.g. if there
            // is only an addXXXX method available, then this would throw
            // an exception, so we'll check to see if getting the property
            // is possible first
            if (property.canGet()) {
                try {
                    value = property.get(obj);
                } catch (IllegalAccessException e) {
                    throw new PropertyPermissionException(propertyName, node.getPath(), obj.getClass(),
                        "Illegal access while attempting to get property value from object", e);
                } catch (InvocationTargetException e) {
                    Throwable t = e;
                    // this generally means the setXXXX method on the object
                    // threw an exception -- we want to unwrap that and just
                    // return that exception instead
                    if (e.getCause() != null) {
                        t = e.getCause();
                    }
                    throw new PropertyInvocationException(propertyName, node.getPath(), obj.getClass(),
                        "The existing value for property [" + propertyName + "] caused an exception during get",
                        t.getMessage(), t);
                }
            }
            // if null, then we need to create a new instance of it
            if (value == null) {
                Class newType = property.getType();
                // create a new instance of either the actual type OR the
                // type specified in the "type" attribute
                if (typeAttrClass != null) {
                    newType = typeAttrClass;
                }
                try {
                    value = newType.newInstance();
                } catch (InstantiationException e) {
                    throw new XmlBeanClassException(
                        "Failed while attempting to create object of type " + newType.getName(), e);
                } catch (IllegalAccessException e) {
                    throw new PropertyPermissionException(propertyName, node.getPath(), obj.getClass(),
                        "Illegal access while attempting to create new instance of " + newType.getName(), e);
                }
            }
            // special handling for "collections" -- required for handling
            // the values in configuring of child objects
            CollectionHelper newch = null;
            if (value instanceof Collection || value instanceof Map) {
                newch = createCollectionHelper(node, obj, value, propertyName, property, valueAttrString,
                    keyAttrString);
            }
            // recursively configure the next object
            doConfigure(node, value, properties, checkForDuplicates, newch);
        }
        // save this reference object back (since it was successfully configured)
        if (ch != null) {
            if (ch.isCollectionType()) {
                ch.getCollectionObject().add(value);
            } else if (ch.isMapType()) {
                // need to figure out the key value -- it may either be a value from the value OR a simple type
                Object keyValue = null;
                if (ch.getKeyProperty() == null) {
                    // a KEY must have been set!
                    if (StringUtil.isEmpty(keyAttrString)) {
                        throw new PropertyIsEmptyException(propertyName, node.getPath(), obj.getClass(),
                            "The XML attribute [key] was null or empty and is required");
                    } else {
                        try {
                            keyValue = TypeConverterUtil.convert(keyAttrString, ch.getKeyClass());
                        } catch (ConversionException e) {
                            // NOTE(review): message is missing a closing "]" after the type name
                            throw new PropertyConversionException(propertyName, node.getPath(), obj.getClass(),
                                "Unable to cleanly convert key value [" + keyAttrString + "] into type ["
                                + ch.getKeyClass().getName() + ": " + e.getMessage(), e);
                        }
                    }
                } else {
                    try {
                        // extract the key value from the object
                        keyValue = ch.getKeyProperty().get(value);
                    } catch (Exception e) {
                        throw new PropertyPermissionException(propertyName, node.getPath(), value.getClass(),
                            "Unable to access property to get the value of the key: " + e.getMessage(), e);
                    }
                    if (keyValue == null) {
                        throw new PropertyIsEmptyException(propertyName, node.getPath(), obj.getClass(),
                            "The value of the key [" + ch.getKeyProperty().getName()
                            + "] was null; unable to put value onto the map");
                    }
                }
                ch.getMapObject().put(keyValue, value);
            } else {
                throw new PropertyPermissionException(propertyName, node.getPath(), obj.getClass(),
                    "Unsupported collection/map type used");
            }
        } else {
            try {
                property.addOrSet(obj, value);
            } catch (InvocationTargetException e) {
                Throwable t = e;
                // this generally means the setXXXX method on the object
                // threw an exception -- we want to unwrap that and just
                // return that exception instead
                if (e.getCause() != null) {
                    t = e.getCause();
                }
                throw new PropertyInvocationException(propertyName, node.getPath(), obj.getClass(),
                    "The value '" + nodeText + "' for property '" + propertyName + "' caused an exception",
                    t.getMessage(), t);
            } catch (IllegalAccessException e) {
                throw new PropertyPermissionException(propertyName, node.getPath(), obj.getClass(),
                    "Illegal access while setting property", e);
            }
        }
    }
}
public class RowService { /** * Returns a { @ link ColumnFamily } composed by the non expired { @ link Cell } s of the specified { @ link ColumnFamily } . * @ param columnFamily A { @ link ColumnFamily } . * @ param timestamp The max allowed timestamp for the { @ link Cell } s . * @ return A { @ link ColumnFamily } composed by the non expired { @ link Cell } s of the specified { @ link ColumnFamily } . */ protected ColumnFamily cleanExpired ( ColumnFamily columnFamily , long timestamp ) { } }
ColumnFamily cleanColumnFamily = ArrayBackedSortedColumns . factory . create ( baseCfs . metadata ) ; for ( Cell cell : columnFamily ) { if ( cell . isLive ( timestamp ) ) { cleanColumnFamily . addColumn ( cell ) ; } } return cleanColumnFamily ;
public class StaticConnectionProvider {
/**
 * from ConnectionProvider
 *
 * Callback invoked when a transactional connection fails: the broken
 * connection is simply closed under the given identifier.
 * NOTE(review): the triggering SQLException {@code error} is intentionally
 * not inspected here — confirm the caller logs/handles it before invoking
 * this callback.
 */
public void txConnectionFailed ( String ident , Connection conn , SQLException error ) { } }
// dispose of the failed connection; no recovery is attempted here
close(conn, ident);
public class History { /** * Redos a change in the history . * @ return true if successful , false if there are no changes to redo */ public boolean redo ( ) { } }
LOGGER . trace ( "redo, before, size: " + changes . size ( ) + " pos: " + position . get ( ) + " validPos: " + validPosition . get ( ) ) ; Change nextChange = next ( ) ; if ( nextChange != null ) { doWithoutListeners ( nextChange . getSetting ( ) , nextChange :: redo ) ; LOGGER . trace ( "redo, after, size: " + changes . size ( ) + " pos: " + position . get ( ) + " validPos: " + validPosition . get ( ) ) ; return true ; } return false ;
public class JsGeometryIndexService { /** * Create a new geometry index instance . * @ param instance * The index service instance to help you create the index . * @ param type * The type for the deepest index value . * @ param values * A list of values for the children to create . * @ return The new index . */ @ ExportInstanceMethod public static GeometryIndex create ( GeometryIndexService instance , String type , int [ ] values ) { } }
if ( "geometry" . equalsIgnoreCase ( type ) ) { return instance . create ( GeometryIndexType . TYPE_GEOMETRY , values ) ; } else if ( "vertex" . equalsIgnoreCase ( type ) ) { return instance . create ( GeometryIndexType . TYPE_VERTEX , values ) ; } else if ( "edge" . equalsIgnoreCase ( type ) ) { return instance . create ( GeometryIndexType . TYPE_EDGE , values ) ; } return null ;
public class AssemblyAnalyzer { /** * Performs the analysis on a single Dependency . * @ param dependency the dependency to analyze * @ param engine the engine to perform the analysis under * @ throws AnalysisException if anything goes sideways */ @ Override public void analyzeDependency ( Dependency dependency , Engine engine ) throws AnalysisException { } }
final File test = new File ( dependency . getActualFilePath ( ) ) ; if ( ! test . isFile ( ) ) { throw new AnalysisException ( String . format ( "%s does not exist and cannot be analyzed by dependency-check" , dependency . getActualFilePath ( ) ) ) ; } if ( grokAssembly == null ) { LOGGER . warn ( "GrokAssembly didn't get deployed" ) ; return ; } if ( baseArgumentList == null ) { LOGGER . warn ( "Assembly Analyzer was unable to execute" ) ; return ; } final List < String > args = new ArrayList < > ( baseArgumentList ) ; args . add ( dependency . getActualFilePath ( ) ) ; final ProcessBuilder pb = new ProcessBuilder ( args ) ; try { final Process proc = pb . start ( ) ; final GrokParser parser = new GrokParser ( ) ; final AssemblyData data = parser . parse ( proc . getInputStream ( ) ) ; // Try evacuating the error stream final String errorStream = IOUtils . toString ( proc . getErrorStream ( ) , StandardCharsets . UTF_8 ) ; if ( null != errorStream && ! errorStream . isEmpty ( ) ) { LOGGER . warn ( "Error from GrokAssembly: {}" , errorStream ) ; } final int rc ; try { rc = proc . waitFor ( ) ; } catch ( InterruptedException ie ) { Thread . currentThread ( ) . interrupt ( ) ; return ; } if ( rc == 3 ) { LOGGER . debug ( "{} is not a .NET assembly or executable and as such cannot be analyzed by dependency-check" , dependency . getActualFilePath ( ) ) ; return ; } else if ( rc != 0 ) { LOGGER . debug ( "Return code {} from GrokAssembly; dependency-check is unable to analyze the library: {}" , rc , dependency . getActualFilePath ( ) ) ; return ; } // First , see if there was an error final String error = data . getError ( ) ; if ( error != null && ! error . isEmpty ( ) ) { throw new AnalysisException ( error ) ; } if ( data . getWarning ( ) != null ) { LOGGER . debug ( "Grok Assembly - could not get namespace on dependency `{}` - ()" , dependency . getActualFilePath ( ) , data . getWarning ( ) ) ; } final StringBuilder sb = new StringBuilder ( ) ; if ( ! StringUtils . 
isEmpty ( data . getFileDescription ( ) ) ) { sb . append ( data . getFileDescription ( ) ) ; } if ( ! StringUtils . isEmpty ( data . getComments ( ) ) ) { if ( sb . length ( ) > 0 ) { sb . append ( "\n\n" ) ; } sb . append ( data . getComments ( ) ) ; } if ( ! StringUtils . isEmpty ( data . getLegalCopyright ( ) ) ) { if ( sb . length ( ) > 0 ) { sb . append ( "\n\n" ) ; } sb . append ( data . getLegalCopyright ( ) ) ; } if ( ! StringUtils . isEmpty ( data . getLegalTrademarks ( ) ) ) { if ( sb . length ( ) > 0 ) { sb . append ( "\n" ) ; } sb . append ( data . getLegalTrademarks ( ) ) ; } final String description = sb . toString ( ) ; if ( description . length ( ) > 0 ) { dependency . setDescription ( description ) ; addMatchingValues ( data . getNamespaces ( ) , description , dependency , EvidenceType . VENDOR ) ; addMatchingValues ( data . getNamespaces ( ) , description , dependency , EvidenceType . PRODUCT ) ; } if ( ! StringUtils . isEmpty ( data . getProductVersion ( ) ) ) { dependency . addEvidence ( EvidenceType . VERSION , "grokassembly" , "ProductVersion" , data . getProductVersion ( ) , Confidence . HIGHEST ) ; } if ( ! StringUtils . isEmpty ( data . getFileVersion ( ) ) ) { dependency . addEvidence ( EvidenceType . VERSION , "grokassembly" , "FileVersion" , data . getFileVersion ( ) , Confidence . HIGHEST ) ; } if ( data . getFileVersion ( ) != null && data . getProductVersion ( ) != null ) { final int max = data . getFileVersion ( ) . length ( ) > data . getProductVersion ( ) . length ( ) ? data . getProductVersion ( ) . length ( ) : data . getFileVersion ( ) . length ( ) ; int pos ; for ( pos = 0 ; pos < max ; pos ++ ) { if ( data . getFileVersion ( ) . charAt ( pos ) != data . getProductVersion ( ) . charAt ( pos ) ) { break ; } } final DependencyVersion fileVersion = DependencyVersionUtil . parseVersion ( data . getFileVersion ( ) , true ) ; final DependencyVersion productVersion = DependencyVersionUtil . parseVersion ( data . 
getProductVersion ( ) , true ) ; if ( pos > 0 ) { final DependencyVersion matchingVersion = DependencyVersionUtil . parseVersion ( data . getFileVersion ( ) . substring ( 0 , pos ) , true ) ; if ( fileVersion . toString ( ) . length ( ) == data . getFileVersion ( ) . length ( ) ) { if ( matchingVersion != null && matchingVersion . getVersionParts ( ) . size ( ) > 2 ) { dependency . addEvidence ( EvidenceType . VERSION , "AssemblyAnalyzer" , "FilteredVersion" , matchingVersion . toString ( ) , Confidence . HIGHEST ) ; dependency . setVersion ( matchingVersion . toString ( ) ) ; } } } if ( dependency . getVersion ( ) == null ) { if ( data . getFileVersion ( ) . length ( ) >= data . getProductVersion ( ) . length ( ) ) { if ( fileVersion . toString ( ) . length ( ) == data . getFileVersion ( ) . length ( ) ) { dependency . setVersion ( fileVersion . toString ( ) ) ; } else if ( productVersion . toString ( ) . length ( ) == data . getProductVersion ( ) . length ( ) ) { dependency . setVersion ( productVersion . toString ( ) ) ; } } else { if ( productVersion . toString ( ) . length ( ) == data . getProductVersion ( ) . length ( ) ) { dependency . setVersion ( productVersion . toString ( ) ) ; } else if ( fileVersion . toString ( ) . length ( ) == data . getFileVersion ( ) . length ( ) ) { dependency . setVersion ( fileVersion . toString ( ) ) ; } } } } else if ( data . getFileVersion ( ) != null ) { final DependencyVersion version = DependencyVersionUtil . parseVersion ( data . getFileVersion ( ) , true ) ; dependency . setVersion ( version . toString ( ) ) ; } else if ( data . getProductVersion ( ) != null ) { final DependencyVersion version = DependencyVersionUtil . parseVersion ( data . getProductVersion ( ) , true ) ; dependency . setVersion ( version . toString ( ) ) ; } if ( ! StringUtils . isEmpty ( data . getCompanyName ( ) ) ) { dependency . addEvidence ( EvidenceType . VENDOR , "grokassembly" , "CompanyName" , data . getCompanyName ( ) , Confidence . 
HIGHEST ) ; addMatchingValues ( data . getNamespaces ( ) , data . getCompanyName ( ) , dependency , EvidenceType . VENDOR ) ; } if ( ! StringUtils . isEmpty ( data . getProductName ( ) ) ) { dependency . addEvidence ( EvidenceType . PRODUCT , "grokassembly" , "ProductName" , data . getProductName ( ) , Confidence . HIGHEST ) ; addMatchingValues ( data . getNamespaces ( ) , data . getProductName ( ) , dependency , EvidenceType . PRODUCT ) ; } if ( ! StringUtils . isEmpty ( data . getFileDescription ( ) ) ) { dependency . addEvidence ( EvidenceType . PRODUCT , "grokassembly" , "FileDescription" , data . getFileDescription ( ) , Confidence . HIGH ) ; addMatchingValues ( data . getNamespaces ( ) , data . getFileDescription ( ) , dependency , EvidenceType . PRODUCT ) ; } final String internalName = data . getInternalName ( ) ; if ( ! StringUtils . isEmpty ( internalName ) ) { dependency . addEvidence ( EvidenceType . PRODUCT , "grokassembly" , "InternalName" , internalName , Confidence . MEDIUM ) ; addMatchingValues ( data . getNamespaces ( ) , internalName , dependency , EvidenceType . PRODUCT ) ; addMatchingValues ( data . getNamespaces ( ) , internalName , dependency , EvidenceType . VENDOR ) ; if ( dependency . getName ( ) == null && StringUtils . containsIgnoreCase ( dependency . getActualFile ( ) . getName ( ) , internalName ) ) { final String ext = FileUtils . getFileExtension ( internalName ) ; if ( ext != null ) { dependency . setName ( internalName . substring ( 0 , internalName . length ( ) - ext . length ( ) - 1 ) ) ; } else { dependency . setName ( internalName ) ; } } } final String originalFilename = data . getOriginalFilename ( ) ; if ( ! StringUtils . isEmpty ( originalFilename ) ) { dependency . addEvidence ( EvidenceType . PRODUCT , "grokassembly" , "OriginalFilename" , originalFilename , Confidence . MEDIUM ) ; addMatchingValues ( data . getNamespaces ( ) , originalFilename , dependency , EvidenceType . PRODUCT ) ; if ( dependency . 
getName ( ) == null && StringUtils . containsIgnoreCase ( dependency . getActualFile ( ) . getName ( ) , originalFilename ) ) { final String ext = FileUtils . getFileExtension ( originalFilename ) ; if ( ext != null ) { dependency . setName ( originalFilename . substring ( 0 , originalFilename . length ( ) - ext . length ( ) - 1 ) ) ; } else { dependency . setName ( originalFilename ) ; } } } if ( dependency . getName ( ) != null && dependency . getVersion ( ) != null ) { try { dependency . addSoftwareIdentifier ( new PurlIdentifier ( "generic" , dependency . getName ( ) , dependency . getVersion ( ) , Confidence . MEDIUM ) ) ; } catch ( MalformedPackageURLException ex ) { LOGGER . debug ( "Unable to create Package URL Identifier for " + dependency . getName ( ) , ex ) ; dependency . addSoftwareIdentifier ( new GenericIdentifier ( String . format ( "%s@%s" , dependency . getName ( ) , dependency . getVersion ( ) ) , Confidence . MEDIUM ) ) ; } } dependency . setEcosystem ( DEPENDENCY_ECOSYSTEM ) ; } catch ( GrokParseException saxe ) { LOGGER . error ( "----------------------------------------------------" ) ; LOGGER . error ( "Failed to read the Assembly Analyzer results." ) ; LOGGER . error ( "----------------------------------------------------" ) ; throw new AnalysisException ( "Couldn't parse Assembly Analyzer results (GrokAssembly)" , saxe ) ; } catch ( IOException ioe ) { throw new AnalysisException ( ioe ) ; }
public class Schema {
/**
 * Creates a {@link Type#MAP MAP} {@link Schema} of the given key and value types.
 *
 * @param keySchema Schema of the map key.
 * @param valueSchema Schema of the map value
 * @return A {@link Schema} of {@link Type#MAP MAP} type.
 */
public static Schema mapOf ( Schema keySchema , Schema valueSchema ) { } }
// Delegate to the full constructor, populating only the map key/value slots;
// the remaining positional arguments are unused for MAP schemas.
// NOTE(review): argument meanings inferred from position — confirm against
// the Schema constructor declared elsewhere in this file.
return new Schema(Type.MAP, null, null, keySchema, valueSchema, null, null, null);
public class Reflecter {
/**
 * Populate the JavaBeans properties of this delegate object, based on the
 * specified name/value pairs.
 *
 * @param properties name/value pairs to apply to the delegate
 * @param excludes property names to skip during population
 * @return this reflecter, for call chaining
 */
public < V > Reflecter < T > populate ( Map < String , V > properties , List < String > excludes ) { } }
// nothing to do for an empty/null property map
if (Decisions.isEmpty().apply(properties)) {
    return this;
}
// array delegates are rebuilt wholesale from the JSON "items" entry
// (presumably set by the JSON reader — TODO confirm against JSONer.ReadJSON)
if (this.delegate.get().getClass().isArray()) {
    Object els = null;
    if (null != (els = properties.get(JSONer.ReadJSON.itemsF))) {
        this.delegate = Optional.fromNullable(Resolves.<T>get(this.delegate.get().getClass(), els));
    }
    return this;
}
// optionally run the auto-exchange step before applying the values
if (this.autoExchange) {
    autoExchange();
}
// walk each field, transferring map values onto the object (minus excludes)
fieldLoop(new TransformMap2ObjVal<V>(properties, excludes));
this.isChanged = true;
return this;
public class PlainTextConverter { /** * Called when a { @ link WtTableRow table row } is about to be processed . * @ param n A node representing a table row . */ public void visit ( WtTableRow n ) { } }
if ( currentRow == null ) { currentRow = new ArrayList < String > ( ) ; iterate ( n ) ; if ( currentRow . size ( ) > 0 ) { rows . add ( currentRow ) ; } if ( currentRow . size ( ) == n . getBody ( ) . size ( ) ) { StringBuilder tableRowFormatted = new StringBuilder ( ) ; for ( int i = 0 ; i < currentRow . size ( ) ; i ++ ) { tableRowFormatted . append ( currentRow . get ( i ) ) ; if ( i + 1 < currentRow . size ( ) ) { // appending a separator char only in between cells here tableRowFormatted . append ( '|' ) ; } } writeWord ( tableRowFormatted . toString ( ) ) ; } currentRow = null ; }
public class NotificationBoard { /** * Get { @ link NotificationEntry } by its id . * @ param notification * @ return NotificationEntry */ public NotificationEntry getNotification ( int notification ) { } }
RowView rowView = getRowView ( notification ) ; return rowView != null ? rowView . getNotification ( ) : null ;
public class AbstractIoSession { /** * { @ inheritDoc } */ @ Override public final Object getCurrentWriteMessage ( ) { } }
WriteRequest req = getCurrentWriteRequest ( ) ; if ( req == null ) { return null ; } return req . getMessage ( ) ;