signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class DeleteFileExtensions { /** * Tries to delete a file and if its a directory than its deletes all the sub - directories .
* @ param file
* The File to delete .
* @ throws IOException
* Signals that an I / O exception has occurred . */
public static void delete ( final @ NonNull File file ) throws IOException { } }
|
if ( file . isDirectory ( ) ) { DeleteFileExtensions . deleteAllFiles ( file ) ; } else { String error = null ; // If the file is not deleted
if ( ! file . delete ( ) ) { error = "Cannot delete the File " + file . getAbsolutePath ( ) + "." ; throw new IOException ( error ) ; } }
|
public class CommerceDiscountUtil { /** * Returns all the commerce discounts that the user has permission to view where groupId = & # 63 ; and couponCode = & # 63 ; .
* @ param groupId the group ID
* @ param couponCode the coupon code
* @ return the matching commerce discounts that the user has permission to view */
public static List < CommerceDiscount > filterFindByG_C ( long groupId , String couponCode ) { } }
|
return getPersistence ( ) . filterFindByG_C ( groupId , couponCode ) ;
|
public class BoxFactory { /** * Creates the box subtrees for all the child nodes of the DOM node corresponding to the box creatin status . Recursively creates the child boxes
* from the child nodes .
* @ param stat current tree creation status used for determining the parents */
public void createBoxTree ( BoxTreeCreationStatus stat ) { } }
|
boolean generated = false ; do { if ( stat . parent . isDisplayed ( ) ) { // add previously created boxes ( the rest from the last twin )
if ( stat . parent . preadd != null ) { addToTree ( stat . parent . preadd , stat ) ; stat . parent . preadd = null ; // don ' t need to keep this anymore
} // create : before elements
if ( stat . parent . previousTwin == null ) { Node n = createPseudoElement ( stat . parent , PseudoElementType . BEFORE ) ; if ( n != null && ( n . getNodeType ( ) == Node . ELEMENT_NODE || n . getNodeType ( ) == Node . TEXT_NODE ) ) { stat . curchild = - 1 ; createSubtree ( n , stat ) ; } } // create normal elements
NodeList children = stat . parent . getElement ( ) . getChildNodes ( ) ; for ( int child = stat . parent . firstDOMChild ; child < stat . parent . lastDOMChild ; child ++ ) { Node n = children . item ( child ) ; if ( n . getNodeType ( ) == Node . ELEMENT_NODE || n . getNodeType ( ) == Node . TEXT_NODE ) { stat . curchild = child ; createSubtree ( n , stat ) ; } } // create : after elements
if ( stat . parent . nextTwin == null ) { Node n = createPseudoElement ( stat . parent , PseudoElementType . AFTER ) ; if ( n != null && ( n . getNodeType ( ) == Node . ELEMENT_NODE || n . getNodeType ( ) == Node . TEXT_NODE ) ) { stat . curchild = children . getLength ( ) ; createSubtree ( n , stat ) ; } } normalizeBox ( stat . parent ) ; } // if a twin box has been created , continue creating the unprocessed boxes in the twin box
if ( stat . parent . nextTwin != null ) { stat . parent = stat . parent . nextTwin ; generated = true ; } else generated = false ; } while ( generated ) ;
|
public class OSGiServiceRegistryProxyTargetLocator { /** * < p > locateProxyTarget . < / p >
* @ return a { @ link org . ops4j . pax . wicket . spi . ReleasableProxyTarget } object . */
public ReleasableProxyTarget locateProxyTarget ( ) { } }
|
ServiceReference < ? > [ ] references = fetchReferences ( ) ; if ( references != null ) { // Sort the references . . .
Arrays . sort ( references ) ; // Fetch the first ( if any ) . . .
for ( final ServiceReference < ? > reference : references ) { final Object service = bundleContext . getService ( reference ) ; if ( service == null ) { // The service is gone while we where iterating over the service references . . .
continue ; } // And return a releasable proxy target . . .
return new ReleasableProxyTargetImplementation ( service , reference ) ; } } throw new IllegalStateException ( "can't find any service matching objectClass = " + serviceInterface + " and filter = " + filterString ) ;
|
public class XPathParser { /** * Checks if a given token represents a ForwardAxis .
* @ return true if the token is a ForwardAxis */
private boolean isForwardAxis ( ) { } }
|
final String content = mToken . getContent ( ) ; return ( mToken . getType ( ) == TokenType . TEXT && ( "child" . equals ( content ) || ( "descendant" . equals ( content ) || "descendant-or-self" . equals ( content ) || "attribute" . equals ( content ) || "self" . equals ( content ) || "following" . equals ( content ) || "following-sibling" . equals ( content ) || "namespace" . equals ( content ) ) ) ) ;
|
public class SSLConnectionContextImpl { /** * @ see com . ibm . wsspi . tcpchannel . SSLConnectionContext # getSession ( ) */
public SSLSession getSession ( ) { } }
|
if ( this . sslConnLink != null && this . sslConnLink . getSSLEngine ( ) != null ) { return this . sslConnLink . getSSLEngine ( ) . getSession ( ) ; } return null ;
|
public class StanzaCollector { /** * Returns the next available stanza . The method call will block ( not return ) until a stanza is available or the
* < tt > timeout < / tt > has elapsed or if the connection was terminated because of an error . If the timeout elapses without a
* result or if there was an connection error , < tt > null < / tt > will be returned .
* @ param < P > type of the result stanza .
* @ param timeout the timeout in milliseconds .
* @ return the next available stanza or < code > null < / code > on timeout or connection error .
* @ throws InterruptedException */
@ SuppressWarnings ( "unchecked" ) public < P extends Stanza > P nextResult ( long timeout ) throws InterruptedException { } }
|
throwIfCancelled ( ) ; P res = null ; long remainingWait = timeout ; waitStart = System . currentTimeMillis ( ) ; while ( remainingWait > 0 && connectionException == null && ! cancelled ) { synchronized ( this ) { res = ( P ) resultQueue . poll ( ) ; if ( res != null ) { return res ; } wait ( remainingWait ) ; } remainingWait = timeout - ( System . currentTimeMillis ( ) - waitStart ) ; } return res ;
|
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EEnum getPGPConstant ( ) { } }
|
if ( pgpConstantEEnum == null ) { pgpConstantEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 62 ) ; } return pgpConstantEEnum ;
|
public class Record { /** * Gets the field at the given position . If the field at that position is null , then this method leaves
* the target field unchanged and returns false .
* @ param fieldNum The position of the field .
* @ param target The value to deserialize the field into .
* @ return True , if the field was deserialized properly , false , if the field was null . */
public boolean getFieldInto ( int fieldNum , Value target ) { } }
|
// range check
if ( fieldNum < 0 || fieldNum >= this . numFields ) { throw new IndexOutOfBoundsException ( ) ; } // get offset and check for null
int offset = this . offsets [ fieldNum ] ; if ( offset == NULL_INDICATOR_OFFSET ) { return false ; } else if ( offset == MODIFIED_INDICATOR_OFFSET ) { // value that has been set is new or modified
// bring the binary in sync so that the deserialization gives the correct result
updateBinaryRepresenation ( ) ; offset = this . offsets [ fieldNum ] ; } final int limit = offset + this . lengths [ fieldNum ] ; deserialize ( target , offset , limit , fieldNum ) ; return true ;
|
public class BiIterator { /** * It is preferred to call < code > forEachRemaining ( Try . BiConsumer ) < / code > to avoid creating unnecessary < code > Pair < / code > objects .
 * @ deprecated prefer { @ code forEachRemaining ( Try . BiConsumer ) } */
@ Override @ Deprecated public void forEachRemaining ( java . util . function . Consumer < ? super Pair < A , B > > action ) { } }
|
// Delegates to the inherited implementation, which supplies a Pair per element.
super . forEachRemaining ( action ) ;
|
public class ArrayUtils { /** * Removes an item from the array .
* If the item has been found , a new array is returned where this item is removed . Otherwise the original array is returned .
* @ param src the src array
* @ param object the object to remove
* @ param < T > the type of the array
* @ return the resulting array */
public static < T > T [ ] remove ( T [ ] src , T object ) { } }
|
int index = indexOf ( src , object ) ; if ( index == - 1 ) { return src ; } T [ ] dst = ( T [ ] ) Array . newInstance ( src . getClass ( ) . getComponentType ( ) , src . length - 1 ) ; System . arraycopy ( src , 0 , dst , 0 , index ) ; if ( index < src . length - 1 ) { System . arraycopy ( src , index + 1 , dst , index , src . length - index - 1 ) ; } return dst ;
|
public class ExpandableGridView { /** * Notifies the listener , which has been registered to be notified , when a child has been
* clicked , about a child being clicked .
* @ param view
* The view within the expandable grid view , which has been clicked , as an instance of
* the class { @ link View } . The view may not be null
* @ param groupIndex
* The index of the group , the child , which has been clicked , belongs to , as an { @ link
* Integer } value
* @ param childIndex
* The index of the child , which has been clicked , as an { @ link Integer } value
* @ param id
* The id of the child , which has been clicked , as a { @ link Long } value
* @ return True , if the click has been handled by the listener , false otherwise */
private boolean notifyOnChildClicked ( @ NonNull final View view , final int groupIndex , final int childIndex , final long id ) { } }
|
return childClickListener != null && childClickListener . onChildClick ( this , view , groupIndex , childIndex , id ) ;
|
public class DiscoveryClientResolverFactory { /** * Triggers a refresh of the registered name resolvers .
* @ param event The event that triggered the update . */
@ EventListener ( HeartbeatEvent . class ) public void heartbeat ( final HeartbeatEvent event ) { } }
|
if ( this . monitor . update ( event . getValue ( ) ) ) { for ( final DiscoveryClientNameResolver discoveryClientNameResolver : this . discoveryClientNameResolvers ) { discoveryClientNameResolver . refresh ( ) ; } }
|
public class MigrationRepository {

    /**
     * Normalizes a script path: removes any leading slash and appends a
     * trailing slash when missing.
     *
     * <p>NOTE(review): the previous Javadoc claimed the result has a leading
     * slash, but the implementation has always stripped it. The documentation
     * now describes the actual behavior; callers rely on the slash-free prefix.
     *
     * @param scriptPath the scriptPath that needs to be normalized
     * @return the path without a leading slash and with a trailing slash
     */
    private String normalizePath(String scriptPath) {
        StringBuilder builder = new StringBuilder(scriptPath.length() + 1);
        if (scriptPath.startsWith("/")) {
            // Drop the leading slash.
            builder.append(scriptPath.substring(1));
        } else {
            builder.append(scriptPath);
        }
        if (!scriptPath.endsWith("/")) {
            builder.append("/");
        }
        return builder.toString();
    }
}
|
public class SocialNetworkStructureBuilder { /** * Build all combinations of graph structures for generic event stubs of a maximum length .
 * Each generated graph is also written to disk as a DOT file ( and optionally as PNG ) .
 * @ param length Maximum number of nodes in each to generate
 * @ return All graph combinations of specified length or less */
public Vector < Graph < UserStub > > generateAllNodeDataTypeGraphCombinationsOfMaxLength ( int length ) { } }
|
// Generate the graphs via the superclass, then assign each an id of the form S_<index>_<nodeCount> and write it out.
Vector < Graph < UserStub > > graphs = super . generateAllNodeDataTypeGraphCombinationsOfMaxLength ( length ) ; if ( WRITE_STRUCTURES_IN_PARALLEL ) { // Left as an exercise to the student .
throw new NotImplementedError ( ) ; } else { int i = 0 ; for ( Iterator < Graph < UserStub > > iter = graphs . toIterator ( ) ; iter . hasNext ( ) ; ) { Graph < UserStub > graph = iter . next ( ) ; graph . setGraphId ( "S_" + ++ i + "_" + graph . allNodes ( ) . size ( ) ) ; graph . writeDotFile ( outDir + graph . graphId ( ) + ".gv" , false , ALSO_WRITE_AS_PNG ) ; } System . out . println ( "Wrote " + i + " graph files in DOT format to " + outDir + "" ) ; } return graphs ;
|
public class ListShardsRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ListShardsRequest listShardsRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( listShardsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( listShardsRequest . getStreamName ( ) , STREAMNAME_BINDING ) ; protocolMarshaller . marshall ( listShardsRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; protocolMarshaller . marshall ( listShardsRequest . getExclusiveStartShardId ( ) , EXCLUSIVESTARTSHARDID_BINDING ) ; protocolMarshaller . marshall ( listShardsRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; protocolMarshaller . marshall ( listShardsRequest . getStreamCreationTimestamp ( ) , STREAMCREATIONTIMESTAMP_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class ReviewsImpl { /** * The reviews created would show up for Reviewers on your team . As Reviewers complete reviewing , results of the Review would be POSTED ( i . e . HTTP POST ) on the specified CallBackEndpoint .
* & lt ; h3 & gt ; CallBack Schemas & lt ; / h3 & gt ;
* & lt ; h4 & gt ; Review Completion CallBack Sample & lt ; / h4 & gt ;
* & lt ; p & gt ;
* { & lt ; br / & gt ;
* " ReviewId " : " & lt ; Review Id & gt ; " , & lt ; br / & gt ;
* " ModifiedOn " : " 2016-10-11T22:36:32.9934851Z " , & lt ; br / & gt ;
* " ModifiedBy " : " & lt ; Name of the Reviewer & gt ; " , & lt ; br / & gt ;
* " CallBackType " : " Review " , & lt ; br / & gt ;
* " ContentId " : " & lt ; The ContentId that was specified input & gt ; " , & lt ; br / & gt ;
* " Metadata " : { & lt ; br / & gt ;
* " adultscore " : " 0 . xxx " , & lt ; br / & gt ;
* " a " : " False " , & lt ; br / & gt ;
* " racyscore " : " 0 . xxx " , & lt ; br / & gt ;
* " r " : " True " & lt ; br / & gt ;
* } , & lt ; br / & gt ;
* " ReviewerResultTags " : { & lt ; br / & gt ;
* " a " : " False " , & lt ; br / & gt ;
* " r " : " True " & lt ; br / & gt ;
* } & lt ; br / & gt ;
* } & lt ; br / & gt ;
* & lt ; / p & gt ; .
* @ param teamName Your team name .
* @ param urlContentType The content type .
* @ param createReviewBody Body for create reviews API
* @ param subTeam SubTeam of your team , you want to assign the created review to .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the List & lt ; String & gt ; object */
public Observable < ServiceResponse < List < String > > > createReviewsWithServiceResponseAsync ( String teamName , String urlContentType , List < CreateReviewBodyItem > createReviewBody , String subTeam ) { } }
|
if ( this . client . baseUrl ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.baseUrl() is required and cannot be null." ) ; } if ( teamName == null ) { throw new IllegalArgumentException ( "Parameter teamName is required and cannot be null." ) ; } if ( urlContentType == null ) { throw new IllegalArgumentException ( "Parameter urlContentType is required and cannot be null." ) ; } if ( createReviewBody == null ) { throw new IllegalArgumentException ( "Parameter createReviewBody is required and cannot be null." ) ; } Validator . validate ( createReviewBody ) ; String parameterizedHost = Joiner . on ( ", " ) . join ( "{baseUrl}" , this . client . baseUrl ( ) ) ; return service . createReviews ( teamName , urlContentType , subTeam , createReviewBody , this . client . acceptLanguage ( ) , parameterizedHost , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < List < String > > > > ( ) { @ Override public Observable < ServiceResponse < List < String > > > call ( Response < ResponseBody > response ) { try { ServiceResponse < List < String > > clientResponse = createReviewsDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
|
public class SimpleXmlWriter { /** * Writes ' < elementName ' . */
public void beginElement ( String elementName ) throws IOException { } }
|
addElementName ( elementName ) ; indent ( ) ; writer . write ( "<" ) ; writer . write ( elementName ) ;
|
public class VirtualMachineImagesInner { /** * Gets a list of all virtual machine image versions for the specified location , publisher , offer , and SKU .
* @ param location The name of a supported Azure region .
* @ param publisherName A valid image publisher .
* @ param offer A valid image publisher offer .
* @ param skus A valid image SKU .
* @ param filter The filter to apply on the operation .
* @ param top the Integer value
* @ param orderby the String value
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the List & lt ; VirtualMachineImageResourceInner & gt ; object */
public Observable < ServiceResponse < List < VirtualMachineImageResourceInner > > > listWithServiceResponseAsync ( String location , String publisherName , String offer , String skus , String filter , Integer top , String orderby ) { } }
|
if ( location == null ) { throw new IllegalArgumentException ( "Parameter location is required and cannot be null." ) ; } if ( publisherName == null ) { throw new IllegalArgumentException ( "Parameter publisherName is required and cannot be null." ) ; } if ( offer == null ) { throw new IllegalArgumentException ( "Parameter offer is required and cannot be null." ) ; } if ( skus == null ) { throw new IllegalArgumentException ( "Parameter skus is required and cannot be null." ) ; } if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( this . client . apiVersion ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiVersion() is required and cannot be null." ) ; } return service . list ( location , publisherName , offer , skus , this . client . subscriptionId ( ) , filter , top , orderby , this . client . apiVersion ( ) , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < List < VirtualMachineImageResourceInner > > > > ( ) { @ Override public Observable < ServiceResponse < List < VirtualMachineImageResourceInner > > > call ( Response < ResponseBody > response ) { try { ServiceResponse < List < VirtualMachineImageResourceInner > > clientResponse = listDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
|
public class Util {

    /**
     * Quote the given string if needed.
     *
     * <p>Fixes two edge cases of the previous implementation: an empty string
     * and a lone quote character used to come back as a single '"' instead of
     * a properly quoted empty string.
     *
     * @param value The value to quote (e.g. bob)
     * @return The quoted string (e.g. "bob"), or null if value is null
     */
    public static String quote(final String value) {
        if (value == null) {
            return null;
        }
        String result = value;
        if (!result.startsWith("\"")) {
            result = "\"" + result;
        }
        // The length check guards the case where the leading quote is the only
        // character (input "" or a lone quote) — it still needs a closing quote.
        if (result.length() < 2 || !result.endsWith("\"")) {
            result = result + "\"";
        }
        return result;
    }
}
|
public class CPDefinitionLocalizationPersistenceImpl { /** * Returns the last cp definition localization in the ordered set where CPDefinitionId = & # 63 ; .
* @ param CPDefinitionId the cp definition ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the last matching cp definition localization , or < code > null < / code > if a matching cp definition localization could not be found */
@ Override public CPDefinitionLocalization fetchByCPDefinitionId_Last ( long CPDefinitionId , OrderByComparator < CPDefinitionLocalization > orderByComparator ) { } }
|
int count = countByCPDefinitionId ( CPDefinitionId ) ; if ( count == 0 ) { return null ; } List < CPDefinitionLocalization > list = findByCPDefinitionId ( CPDefinitionId , count - 1 , count , orderByComparator ) ; if ( ! list . isEmpty ( ) ) { return list . get ( 0 ) ; } return null ;
|
public class ContextStore { /** * - - - Function Properties - - -
 * Stores a computed function - property result in the cache .
 * @ param uuid the context identifier
 * @ param propertyName the name of the property
 * @ param value the value to cache */
public void storeFunctionPropertyResult ( final String uuid , final String propertyName , final Object value ) { } }
|
// Cache key is derived from the uuid and property name — presumably contextCacheKey builds a composite key; confirm.
this . functionPropertyCache . put ( contextCacheKey ( uuid , propertyName ) , value ) ;
|
public class RestAssuredConfigurator { /** * required for rest assured base URI configuration . */
public void configure ( @ Observes ( precedence = - 200 ) ArquillianDescriptor arquillianDescriptor ) { } }
|
restAssuredConfigurationInstanceProducer . set ( RestAssuredConfiguration . fromMap ( arquillianDescriptor . extension ( "restassured" ) . getExtensionProperties ( ) ) ) ;
|
public class CartesianScaleLabel { /** * Write the options of scale label
* @ return options as JSON object
* @ throws java . io . IOException If an I / O error occurs */
public String encode ( ) throws IOException { } }
|
FastStringWriter fsw = new FastStringWriter ( ) ; try { fsw . write ( "{" ) ; ChartUtils . writeDataValue ( fsw , "display" , this . display , false ) ; ChartUtils . writeDataValue ( fsw , "labelString" , this . labelString , true ) ; ChartUtils . writeDataValue ( fsw , "lineHeight" , this . lineHeight , true ) ; ChartUtils . writeDataValue ( fsw , "fontColor" , this . fontColor , true ) ; ChartUtils . writeDataValue ( fsw , "fontFamily" , this . fontFamily , true ) ; ChartUtils . writeDataValue ( fsw , "fontSize" , this . fontSize , true ) ; ChartUtils . writeDataValue ( fsw , "fontStyle" , this . fontStyle , true ) ; ChartUtils . writeDataValue ( fsw , "padding" , this . padding , true ) ; fsw . write ( "}" ) ; } finally { fsw . close ( ) ; } return fsw . toString ( ) ;
|
public class CircuitBreakerHttpClient { /** * Creates a new decorator that binds one { @ link CircuitBreaker } per { @ link HttpMethod } with the specified
* { @ link CircuitBreakerStrategy } .
* < p > Since { @ link CircuitBreaker } is a unit of failure detection , don ' t reuse the same instance for
* unrelated services .
* @ param factory a function that takes a { @ link HttpMethod } and creates a new { @ link CircuitBreaker } */
public static Function < Client < HttpRequest , HttpResponse > , CircuitBreakerHttpClient > newPerMethodDecorator ( Function < String , CircuitBreaker > factory , CircuitBreakerStrategy strategy ) { } }
|
return newDecorator ( CircuitBreakerMapping . perMethod ( factory ) , strategy ) ;
|
public class LabelingJobInputConfigMarshaller { /** * Marshall the given parameter object . */
public void marshall ( LabelingJobInputConfig labelingJobInputConfig , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( labelingJobInputConfig == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( labelingJobInputConfig . getDataSource ( ) , DATASOURCE_BINDING ) ; protocolMarshaller . marshall ( labelingJobInputConfig . getDataAttributes ( ) , DATAATTRIBUTES_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class WSConnectionRequestInfoImpl { /** * determines if two typeMaps match . Note that this method takes under account
* an Oracle 11g change with TypeMap
* @ param m1
* @ param m2
* @ return */
public static final boolean matchTypeMap ( Map < String , Class < ? > > m1 , Map < String , Class < ? > > m2 ) { } }
|
final boolean isTraceOn = TraceComponent . isAnyTracingEnabled ( ) ; if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . entry ( tc , "matchTypeMap" , new Object [ ] { m1 , m2 } ) ; boolean match = false ; if ( m1 == m2 ) match = true ; else if ( m1 != null && m1 . equals ( m2 ) ) match = true ; if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . exit ( tc , "matchTypeMap" , match ) ; return match ;
|
public class OracleNoSQLClient { /** * Iterate and store attributes .
* @ param entity
* JPA entity .
* @ param metamodel
* JPA meta model .
* @ param row
* kv row .
* @ param attributes
* JPA attributes .
* @ param schemaTable
* the schema table
* @ param metadata
* the metadata */
private void process ( Object entity , MetamodelImpl metamodel , Row row , Set < Attribute > attributes , Table schemaTable , EntityMetadata metadata ) { } }
|
for ( Attribute attribute : attributes ) { // by pass association .
if ( ! attribute . isAssociation ( ) ) { // in case of embeddable id .
if ( attribute . equals ( metadata . getIdAttribute ( ) ) && metamodel . isEmbeddable ( ( ( AbstractAttribute ) attribute ) . getBindableJavaType ( ) ) ) { processEmbeddableAttribute ( entity , metamodel , row , schemaTable , metadata , attribute ) ; } else { if ( metamodel . isEmbeddable ( ( ( AbstractAttribute ) attribute ) . getBindableJavaType ( ) ) ) { processEmbeddableAttribute ( entity , metamodel , row , schemaTable , metadata , attribute ) ; } else { setField ( row , schemaTable , entity , attribute ) ; } } } }
|
public class AtomicInteger { /** * Atomically updates the current value with the results of
* applying the given function to the current and given values ,
* returning the updated value . The function should be
* side - effect - free , since it may be re - applied when attempted
* updates fail due to contention among threads . The function
* is applied with the current value as its first argument ,
* and the given update as the second argument .
* @ param x the update value
* @ param accumulatorFunction a side - effect - free function of two arguments
* @ return the updated value
* @ since 1.8 */
public final int accumulateAndGet ( int x , IntBinaryOperator accumulatorFunction ) { } }
|
int prev , next ; do { prev = get ( ) ; next = accumulatorFunction . applyAsInt ( prev , x ) ; } while ( ! compareAndSet ( prev , next ) ) ; return next ;
|
public class BifurcatedConsumerSessionProxy { /** * This method is used to read a set of locked messages held by the message processor .
* This call will simply be passed onto the server who will call the method on the real
* bifurcated consumer session residing on the server .
* @ param msgHandles An array of message ids that denote the messages to be read .
* @ return Returns an array of SIBusMessages
* @ throws com . ibm . wsspi . sib . core . exception . SISessionUnavailableException
* @ throws com . ibm . wsspi . sib . core . exception . SISessionDroppedException
* @ throws com . ibm . wsspi . sib . core . exception . SIConnectionUnavailableException
* @ throws com . ibm . wsspi . sib . core . exception . SIConnectionDroppedException
* @ throws com . ibm . websphere . sib . exception . SIResourceException
* @ throws com . ibm . wsspi . sib . core . exception . SIConnectionLostException
* @ throws com . ibm . websphere . sib . exception . SIIncorrectCallException
* @ throws com . ibm . wsspi . sib . core . exception . SIMessageNotLockedException
* @ throws com . ibm . websphere . sib . exception . SIErrorException */
public SIBusMessage [ ] readSet ( SIMessageHandle [ ] msgHandles ) throws SISessionUnavailableException , SISessionDroppedException , SIConnectionUnavailableException , SIConnectionDroppedException , SIResourceException , SIConnectionLostException , SIIncorrectCallException , SIMessageNotLockedException , SIErrorException { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "readSet" , new Object [ ] { msgHandles . length + " msg ids" } ) ; SIBusMessage [ ] messages = null ; try { closeLock . readLock ( ) . lockInterruptibly ( ) ; try { checkAlreadyClosed ( ) ; CommsByteBuffer request = getCommsByteBuffer ( ) ; request . putShort ( getConnectionObjectID ( ) ) ; request . putShort ( getProxyID ( ) ) ; request . putSIMessageHandles ( msgHandles ) ; CommsByteBuffer reply = jfapExchange ( request , JFapChannelConstants . SEG_READ_SET , JFapChannelConstants . PRIORITY_MEDIUM , true ) ; try { short err = reply . getCommandCompletionCode ( JFapChannelConstants . SEG_READ_SET_R ) ; if ( err != CommsConstants . SI_NO_EXCEPTION ) { checkFor_SISessionUnavailableException ( reply , err ) ; checkFor_SISessionDroppedException ( reply , err ) ; checkFor_SIConnectionUnavailableException ( reply , err ) ; checkFor_SIConnectionDroppedException ( reply , err ) ; checkFor_SIResourceException ( reply , err ) ; checkFor_SIConnectionLostException ( reply , err ) ; checkFor_SIIncorrectCallException ( reply , err ) ; checkFor_SIMessageNotLockedException ( reply , err ) ; checkFor_SIErrorException ( reply , err ) ; defaultChecker ( reply , err ) ; } int numberOfMessages = reply . getInt ( ) ; messages = new SIBusMessage [ numberOfMessages ] ; for ( int x = 0 ; x < numberOfMessages ; x ++ ) { messages [ x ] = reply . getMessage ( getCommsConnection ( ) ) ; } } finally { reply . release ( false ) ; } } finally { closeLock . readLock ( ) . unlock ( ) ; } } catch ( InterruptedException e ) { // No FFDC Code needed
} if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "readSet" ) ; return messages ;
|
public class RSAUtils { /** * Generates an RSA private key from a modulus and a private exponent .
 * NOTE ( translated from the original Chinese comment ) : this code relies on the JDK
 * default padding , RSA / None / PKCS1Padding ; other JDKs may use a different default
 * ( e . g . Android defaults to RSA / None / NoPadding ) .
 * @ param modulus the key modulus as a decimal string
 * @ param exponent the private exponent as a decimal string
 * @ return the RSA private key , or null if key generation fails */
public static RSAPrivateKey getPrivateKey ( String modulus , String exponent ) { } }
|
// NOTE ( review ) : failures are swallowed ( stack trace + null return ) ; callers must handle null .
try { BigInteger b1 = new BigInteger ( modulus ) ; BigInteger b2 = new BigInteger ( exponent ) ; KeyFactory keyFactory = KeyFactory . getInstance ( KEY_ALGORITHM ) ; RSAPrivateKeySpec keySpec = new RSAPrivateKeySpec ( b1 , b2 ) ; return ( RSAPrivateKey ) keyFactory . generatePrivate ( keySpec ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; return null ; }
|
public class CPFriendlyURLEntryPersistenceImpl { /** * Removes all the cp friendly url entries where classNameId = & # 63 ; and classPK = & # 63 ; from the database .
* @ param classNameId the class name ID
* @ param classPK the class pk */
@ Override public void removeByC_C ( long classNameId , long classPK ) { } }
|
for ( CPFriendlyURLEntry cpFriendlyURLEntry : findByC_C ( classNameId , classPK , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ) { remove ( cpFriendlyURLEntry ) ; }
|
public class CommercePriceListUtil { /** * Returns the first commerce price list in the ordered set where uuid = & # 63 ; and companyId = & # 63 ; .
* @ param uuid the uuid
* @ param companyId the company ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching commerce price list , or < code > null < / code > if a matching commerce price list could not be found */
public static CommercePriceList fetchByUuid_C_First ( String uuid , long companyId , OrderByComparator < CommercePriceList > orderByComparator ) { } }
|
return getPersistence ( ) . fetchByUuid_C_First ( uuid , companyId , orderByComparator ) ;
|
public class ObjFileLoader {

    /**
     * Loads a .obj file, storing the data in the provided lists. After loading, the input
     * stream will be closed. The number of components for each attribute is returned in a
     * Vector3i: x being the number of position components, y the number of normal components
     * and z the number of texture coord components. Note that normal and/or texture coord
     * attributes might be missing from the .obj file; if so, their lists will be empty.
     * Passing null lists for the texture coords or normals will result in no loading of
     * their data. The indices are stored in the indices list.
     *
     * @param stream The input stream for the .obj file
     * @param positions The list in which to store the positions
     * @param normals The list in which to store the normals or null to ignore them
     * @param textureCoords The list in which to store the texture coords or null to ignore them
     * @param indices The list in which to store the indices
     * @return A Vector3i containing, in order, the number of components for the positions,
     *         normals and texture coords (never negative; -1 markers are clamped to 0)
     * @throws MalformedObjFileException If any errors occur during loading; the offending
     *         line (if any) is included in the exception
     */
    public static Vector3i load(InputStream stream, TFloatList positions, TFloatList normals, TFloatList textureCoords, TIntList indices) {
        // Component counts stay -1 until the first line of each kind fixes them.
        int positionSize = -1;
        // Raw vt/vn data is buffered here, then re-ordered per-vertex below.
        final TFloatList rawTextureCoords = new TFloatArrayList();
        int textureCoordSize = -1;
        final TFloatList rawNormalComponents = new TFloatArrayList();
        int normalSize = -1;
        // Secondary index streams parsed out of the face ("f") lines.
        final TIntList textureCoordIndices = new TIntArrayList();
        final TIntList normalIndices = new TIntArrayList();
        String line = null;
        try (Scanner scanner = new Scanner(stream)) {
            while (scanner.hasNextLine()) {
                line = scanner.nextLine();
                if (line.startsWith(POSITION_LIST_PREFIX + COMPONENT_SEPARATOR)) {
                    parseComponents(positions, line);
                    if (positionSize == -1) {
                        positionSize = positions.size();
                    }
                } else if (textureCoords != null && line.startsWith(TEXTURE_LIST_PREFIX + COMPONENT_SEPARATOR)) {
                    parseComponents(rawTextureCoords, line);
                    if (textureCoordSize == -1) {
                        textureCoordSize = rawTextureCoords.size();
                    }
                } else if (normals != null && line.startsWith(NORMAL_LIST_PREFIX + COMPONENT_SEPARATOR)) {
                    parseComponents(rawNormalComponents, line);
                    if (normalSize == -1) {
                        normalSize = rawNormalComponents.size();
                    }
                } else if (line.startsWith(INDEX_LIST_PREFIX + COMPONENT_SEPARATOR)) {
                    // NOTE(review): javadoc allows a null 'indices' list, but this call and
                    // the indices.get(i) lookups below are reached when face lines exist —
                    // confirm parseIndices tolerates null, otherwise a null 'indices' NPEs here.
                    parseIndices(indices, textureCoordIndices, normalIndices, line);
                }
            }
            // Parsing finished without error; clear so the catch below reports no line.
            line = null;
            final boolean hasTextureCoords;
            final boolean hasNormals;
            // Pre-fill the output lists with zeros so per-vertex slots can be set by index.
            if (!textureCoordIndices.isEmpty() && !rawTextureCoords.isEmpty()) {
                textureCoords.fill(0, positions.size() / positionSize * textureCoordSize, 0);
                hasTextureCoords = true;
            } else {
                hasTextureCoords = false;
            }
            if (!normalIndices.isEmpty() && !rawNormalComponents.isEmpty()) {
                normals.fill(0, positions.size() / positionSize * normalSize, 0);
                hasNormals = true;
            } else {
                hasNormals = false;
            }
            // Re-order raw texture coords so they line up with the position indices.
            if (hasTextureCoords) {
                for (int i = 0; i < textureCoordIndices.size(); i++) {
                    final int textureCoordIndex = textureCoordIndices.get(i) * textureCoordSize;
                    final int positionIndex = indices.get(i) * textureCoordSize;
                    for (int ii = 0; ii < textureCoordSize; ii++) {
                        textureCoords.set(positionIndex + ii, rawTextureCoords.get(textureCoordIndex + ii));
                    }
                }
            }
            // Same re-ordering for normals.
            if (hasNormals) {
                for (int i = 0; i < normalIndices.size(); i++) {
                    final int normalIndex = normalIndices.get(i) * normalSize;
                    final int positionIndex = indices.get(i) * normalSize;
                    for (int ii = 0; ii < normalSize; ii++) {
                        normals.set(positionIndex + ii, rawNormalComponents.get(normalIndex + ii));
                    }
                }
            }
        } catch (Exception ex) {
            // 'line' still holds the offending input line when parsing failed mid-file.
            throw new MalformedObjFileException(line, ex);
        }
        // Clamp the -1 "never seen" markers to 0 for the returned component counts.
        return new Vector3i(positionSize, normalSize, textureCoordSize).max(0, 0, 0);
    }
}
|
public class HeaderCell {

    /**
     * Render the header cell's contents. This method sets the style information on the HTML
     * th tag and then calls the
     * {@link #renderHeaderCellContents(org.apache.beehive.netui.tags.rendering.AbstractRenderAppender, String)}
     * method to render the contents of the cell.
     *
     * @param appender the {@link AbstractRenderAppender} to which the output from this tag should be added
     * @throws IOException
     * @throws JspException
     */
    protected void renderCell(AbstractRenderAppender appender) throws IOException, JspException {
        DataGridTagModel dataGridModel = DataGridUtil.getDataGridTagModel(getJspContext());
        assert dataGridModel != null;
        TableRenderer tableRenderer = dataGridModel.getTableRenderer();
        assert tableRenderer != null;
        // Collect every style class that applies to this header cell, in precedence order.
        ArrayList /*<String>*/ styleClasses = new ArrayList /*<String>*/ ();
        /* todo: refactor. add a chain of style decorators here; easier to extend this way. */
        FilterModel filterModel = dataGridModel.getState().getFilterModel();
        if (filterModel.isFiltered(_headerCellModel.getFilterExpression()))
            styleClasses.add(dataGridModel.getStyleModel().getHeaderCellFilteredClass());
        SortModel sortModel = dataGridModel.getState().getSortModel();
        if (sortModel.isSorted(_headerCellModel.getSortExpression()))
            styleClasses.add(dataGridModel.getStyleModel().getHeaderCellSortedClass());
        if (_headerCellModel.isSortable())
            styleClasses.add(dataGridModel.getStyleModel().getHeaderCellSortableClass());
        // An explicitly-set styleClass overrides the grid's default header-cell class.
        if (_cellState.styleClass == null)
            styleClasses.add(dataGridModel.getStyleModel().getHeaderCellClass());
        else
            styleClasses.add(_cellState.styleClass);
        _cellState.styleClass = dataGridModel.getStyleModel().buildStyleClassValue(styleClasses);
        // Evaluate the tag body (if any) into a string before opening the cell.
        JspFragment fragment = getJspBody();
        StringWriter sw = new StringWriter();
        String jspFragmentOutput = null;
        if (fragment != null) {
            fragment.invoke(sw);
            jspFragmentOutput = sw.toString();
        }
        tableRenderer.openHeaderCell(_cellState, appender);
        renderHeaderCellContents(appender, jspFragmentOutput);
        tableRenderer.closeHeaderCell(appender);
        /* render any JavaScript needed to support framework features */
        if (_cellState.id != null) {
            HttpServletRequest request = JspUtil.getRequest(getJspContext());
            String script = renderNameAndId(request, _cellState, null);
            if (script != null) appender.append(script);
        }
    }
}
|
public class SimplePurview { /** * Returns < code > true < / code > if the given purview in the delegate purview
* @ param values
* @ return */
public boolean has ( int ... values ) { } }
|
long p = pur ( values [ 0 ] ) ; for ( int i = 1 , l = values . length ; i < l ; i ++ ) { p += pur ( values [ i ] ) ; } return p == ( this . delegate & p ) ;
|
public class ObjectEditorTable {

    /**
     * Insert the specified element at the specified row.
     *
     * @param element the datum to insert
     * @param row the zero-based row index at which to insert it
     */
    public void insertDatum(Object element, int row) {
        _data.add(row, element);
        // Keep the table model in sync so the view repaints the new row.
        _model.fireTableRowsInserted(row, row);
    }
}
|
public class UserProfileHandlerImpl { /** * { @ inheritDoc } */
public UserProfile findUserProfileByName ( String userName ) throws Exception { } }
|
UserProfile profile = getFromCache ( userName ) ; if ( profile != null ) { return profile ; } Session session = service . getStorageSession ( ) ; try { profile = readProfile ( session , userName ) ; if ( profile != null ) { putInCache ( profile ) ; } } finally { session . logout ( ) ; } return profile ;
|
public class MemoryManager {

    /**
     * Allocates a set of memory segments from this memory manager. If the memory manager
     * pre-allocated the segments, they will be taken from the pool of memory segments.
     * Otherwise, they will be allocated as part of this call.
     *
     * @param owner The owner to associate with the memory segment, for the fallback release.
     * @param target The list into which to put the allocated memory pages.
     * @param numPages The number of pages to allocate.
     * @throws MemoryAllocationException Thrown, if this memory manager does not have the
     *         requested amount of memory pages any more.
     */
    public void allocatePages(Object owner, List<MemorySegment> target, int numPages) throws MemoryAllocationException {
        // sanity check
        if (owner == null) {
            throw new IllegalArgumentException("The memory owner must not be null.");
        }
        // reserve array space, if applicable, to avoid repeated growth while adding
        if (target instanceof ArrayList) {
            ((ArrayList<MemorySegment>) target).ensureCapacity(numPages);
        }
        // ----- BEGIN CRITICAL SECTION -----
        synchronized (lock) {
            if (isShutDown) {
                throw new IllegalStateException("Memory manager has been shut down.");
            }
            // in the case of pre-allocated memory, the 'numNonAllocatedPages' is zero, in the
            // lazy case, the 'freeSegments.size()' is zero — so the sum is the true budget.
            if (numPages > (memoryPool.getNumberOfAvailableMemorySegments() + numNonAllocatedPages)) {
                throw new MemoryAllocationException("Could not allocate " + numPages + " pages. Only " + (memoryPool.getNumberOfAvailableMemorySegments() + numNonAllocatedPages) + " pages are remaining.");
            }
            // Track segments per owner so they can all be released on owner failure.
            Set<MemorySegment> segmentsForOwner = allocatedSegments.get(owner);
            if (segmentsForOwner == null) {
                segmentsForOwner = new HashSet<MemorySegment>(numPages);
                allocatedSegments.put(owner, segmentsForOwner);
            }
            if (isPreAllocated) {
                // Hand out segments from the pre-allocated pool.
                for (int i = numPages; i > 0; i--) {
                    MemorySegment segment = memoryPool.requestSegmentFromPool(owner);
                    target.add(segment);
                    segmentsForOwner.add(segment);
                }
            } else {
                // Lazily allocate fresh segments and shrink the remaining budget.
                for (int i = numPages; i > 0; i--) {
                    MemorySegment segment = memoryPool.allocateNewSegment(owner);
                    target.add(segment);
                    segmentsForOwner.add(segment);
                }
                numNonAllocatedPages -= numPages;
            }
        }
        // ----- END CRITICAL SECTION -----
    }
}
|
public class ConcurrentDateFormatAccess { /** * Convert date to string .
* @ param date the date
* @ return the string */
public String convertDateToString ( final Date date ) { } }
|
if ( date == null ) { return null ; } return dateFormat . get ( ) . format ( date ) ;
|
public class AmazonRedshiftClient {

    /**
     * Modifies the maintenance settings of a cluster. For example, you can defer a
     * maintenance window. You can also update or cancel a deferment.
     *
     * @param request the ModifyClusterMaintenance request
     * @return Result of the ModifyClusterMaintenance operation returned by the service.
     * @throws ClusterNotFoundException
     *         The <code>ClusterIdentifier</code> parameter does not refer to an existing cluster.
     * @sample AmazonRedshift.ModifyClusterMaintenance
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/redshift-2012-12-01/ModifyClusterMaintenance"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public Cluster modifyClusterMaintenance(ModifyClusterMaintenanceRequest request) {
        // Run client-side interceptors/validation, then dispatch the call.
        request = beforeClientExecution(request);
        return executeModifyClusterMaintenance(request);
    }
}
|
public class PrimitiveTransformation { /** * < code > . google . privacy . dlp . v2 . CharacterMaskConfig character _ mask _ config = 3 ; < / code > */
public com . google . privacy . dlp . v2 . CharacterMaskConfig getCharacterMaskConfig ( ) { } }
|
if ( transformationCase_ == 3 ) { return ( com . google . privacy . dlp . v2 . CharacterMaskConfig ) transformation_ ; } return com . google . privacy . dlp . v2 . CharacterMaskConfig . getDefaultInstance ( ) ;
|
public class DataLabelingServiceClient {

    /**
     * Formats a string containing the fully-qualified path to represent a data_item resource.
     *
     * @param project the project id component of the path
     * @param dataset the dataset id component of the path
     * @param dataItem the data item id component of the path
     * @return the instantiated resource path
     * @deprecated Use the {@link DataItemName} class instead.
     */
    @Deprecated
    public static final String formatDataItemName(String project, String dataset, String dataItem) {
        // Fills the path template placeholders in declaration order.
        return DATA_ITEM_PATH_TEMPLATE.instantiate("project", project, "dataset", dataset, "data_item", dataItem);
    }
}
|
public class StatementExecutor { /** * Delete an object from the database by id . */
public int deleteById ( DatabaseConnection databaseConnection , ID id , ObjectCache objectCache ) throws SQLException { } }
|
if ( mappedDelete == null ) { mappedDelete = MappedDelete . build ( dao , tableInfo ) ; } int result = mappedDelete . deleteById ( databaseConnection , id , objectCache ) ; if ( dao != null && ! localIsInBatchMode . get ( ) ) { dao . notifyChanges ( ) ; } return result ;
|
public class HttpContext { /** * Execute a PUT call against the partial URL .
* @ param < T > The type parameter used for the return object
* @ param partialUrl The partial URL to build
* @ param payload The object to use for the PUT
* @ param headers A set of headers to add to the request
* @ param queryParams A set of query parameters to add to the request
* @ param returnType The expected return type
* @ return The return type */
public < T > Optional < T > PUT ( String partialUrl , Object payload , Map < String , Object > headers , List < String > queryParams , GenericType < T > returnType ) { } }
|
URI uri = buildUri ( partialUrl ) ; return executePutRequest ( uri , payload , headers , queryParams , returnType ) ;
|
public class ConfigManager {

    /**
     * Reload config file if it hasn't been loaded in a while.
     * Returns true if the file was reloaded.
     *
     * @return <code>true</code> if the configuration was actually reloaded on
     *         this call, <code>false</code> otherwise (including when a reload
     *         attempt failed)
     */
    public synchronized boolean reloadConfigsIfNecessary() {
        long time = RaidNode.now();
        // Throttle: only attempt a reload once per reloadInterval.
        if (time > lastReloadAttempt + reloadInterval) {
            lastReloadAttempt = time;
            try {
                File file = new File(configFileName);
                long lastModified = file.lastModified();
                // Reload only if the file changed since the last successful reload AND
                // has been stable for RELOAD_WAIT (avoids reading a half-written file).
                if (lastModified > lastSuccessfulReload && time > lastModified + RELOAD_WAIT) {
                    reloadConfigs();
                    lastSuccessfulReload = time;
                    lastReloadAttemptFailed = false;
                    return true;
                }
            } catch (Exception e) {
                // Log only the first failure in a row to avoid flooding the log.
                if (!lastReloadAttemptFailed) {
                    LOG.error("Failed to reload config file - " + "will use existing configuration.", e);
                }
                lastReloadAttemptFailed = true;
            }
        }
        return false;
    }
}
|
public class MemcachedSessionService {

    /**
     * Check if the valid session associated with the provided requested session Id will be
     * relocated with the next {@link #backupSession(Session, boolean)} and change the session
     * id to the new one (containing the new memcached node). The new session id must be
     * returned if the session will be relocated and the id was changed.
     *
     * @param requestedSessionId the sessionId that was requested.
     * @return the new session id if the session will be relocated and the id was changed.
     *         Otherwise <code>null</code>.
     * @see Request#getRequestedSessionId()
     */
    public String changeSessionIdOnMemcachedFailover(final String requestedSessionId) {
        // Failover relocation only makes sense when the node id is encoded in the session id.
        if (!_memcachedNodesManager.isEncodeNodeIdInSessionId()) {
            return null;
        }
        try {
            if (_sticky) {
                /* We can just lookup the session in the local session map, as we wouldn't get
                 * the session from memcached if the node was not available - or, the other way round,
                 * if we would get the session from memcached, the session would not have to be relocated.
                 */
                final MemcachedBackupSession session = _manager.getSessionInternal(requestedSessionId);
                if (session != null && session.isValid()) {
                    final String newSessionId = _memcachedNodesManager.getNewSessionIdIfNodeFromSessionIdUnavailable(session.getId());
                    if (newSessionId != null) {
                        _log.debug("Session needs to be relocated, setting new id on session...");
                        session.setIdForRelocate(newSessionId);
                        _statistics.requestWithMemcachedFailover();
                        return newSessionId;
                    }
                }
            } else {
                /* for non-sticky sessions we check the validity info */
                final String nodeId = getSessionIdFormat().extractMemcachedId(requestedSessionId);
                if (nodeId == null || _memcachedNodesManager.isNodeAvailable(nodeId)) {
                    return null;
                }
                _log.info("Session needs to be relocated as node " + nodeId + " is not available, loading backup session for " + requestedSessionId);
                final MemcachedBackupSession backupSession = loadBackupSession(requestedSessionId);
                if (backupSession != null) {
                    _log.debug("Loaded backup session for " + requestedSessionId + ", adding locally with " + backupSession.getIdInternal() + ".");
                    addValidLoadedSession(backupSession, true);
                    _statistics.requestWithMemcachedFailover();
                    return backupSession.getId();
                }
            }
        } catch (final RuntimeException e) {
            // Best effort: treat lookup failures as "no relocation needed".
            _log.warn("Could not find session in local session map.", e);
        }
        return null;
    }
}
|
public class TileSetRuleSet {

    /**
     * Adds the necessary rules to the digester to parse our tilesets.
     * Derived classes should override this method, being sure to call the
     * superclass method and then adding their own rule instances (which
     * should register themselves relative to the <code>_prefix</code>
     * member).
     *
     * @param digester the digester to register the parsing rules with
     */
    @Override
    public void addRuleInstances(Digester digester) {
        // this creates the appropriate instance when we encounter a
        // <tileset> tag
        digester.addObjectCreate(_path, getTileSetClass().getName());
        // grab the name attribute from the <tileset> tag
        digester.addSetProperties(_path);
        // grab the image path from an <imagePath> element
        digester.addCallMethod(_path + "/imagePath", "setImagePath", 0);
    }
}
|
public class CommercePriceListAccountRelPersistenceImpl { /** * Creates a new commerce price list account rel with the primary key . Does not add the commerce price list account rel to the database .
* @ param commercePriceListAccountRelId the primary key for the new commerce price list account rel
* @ return the new commerce price list account rel */
@ Override public CommercePriceListAccountRel create ( long commercePriceListAccountRelId ) { } }
|
CommercePriceListAccountRel commercePriceListAccountRel = new CommercePriceListAccountRelImpl ( ) ; commercePriceListAccountRel . setNew ( true ) ; commercePriceListAccountRel . setPrimaryKey ( commercePriceListAccountRelId ) ; String uuid = PortalUUIDUtil . generate ( ) ; commercePriceListAccountRel . setUuid ( uuid ) ; commercePriceListAccountRel . setCompanyId ( companyProvider . getCompanyId ( ) ) ; return commercePriceListAccountRel ;
|
public class Conference { /** * Gets list all members from a conference . If a member had already hung up or removed from conference it will be displayed as completed .
* @ return list of members
* @ throws IOException unexpected error . */
public List < ConferenceMember > getMembers ( ) throws Exception { } }
|
final String membersPath = StringUtils . join ( new String [ ] { getUri ( ) , "members" } , '/' ) ; final JSONArray array = toJSONArray ( client . get ( membersPath , null ) ) ; final List < ConferenceMember > members = new ArrayList < ConferenceMember > ( ) ; for ( final Object obj : array ) { members . add ( new ConferenceMember ( client , ( JSONObject ) obj ) ) ; } return members ;
|
public class DeploymentsInner {

    /**
     * Validates whether the specified template is syntactically correct and will be accepted
     * by Azure Resource Manager.
     *
     * @param resourceGroupName The name of the resource group the template will be deployed to. The name is case insensitive.
     * @param deploymentName The name of the deployment.
     * @param properties The deployment properties.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<DeploymentValidateResultInner> validateAsync(String resourceGroupName, String deploymentName, DeploymentProperties properties, final ServiceCallback<DeploymentValidateResultInner> serviceCallback) {
        // Bridge the observable-based call to the callback-based ServiceFuture API.
        return ServiceFuture.fromResponse(validateWithServiceResponseAsync(resourceGroupName, deploymentName, properties), serviceCallback);
    }
}
|
public class GoogleConnector { /** * Requests the user info for the given account . This requires previous
* authorization from the user , so this might start the process .
* @ param accountId
* The id of the account to get the user info .
* @ return The user info bean .
* @ throws IOException If the account cannot be accessed . */
GoogleAccount getAccountInfo ( String accountId ) throws IOException { } }
|
Credential credential = impl_getStoredCredential ( accountId ) ; if ( credential == null ) { throw new UnsupportedOperationException ( "The account has not been authorized yet!" ) ; } Userinfoplus info = impl_requestUserInfo ( credential ) ; GoogleAccount account = new GoogleAccount ( ) ; account . setId ( accountId ) ; account . setName ( info . getName ( ) ) ; return account ;
|
public class JDBCInputFormat { /** * Closes all resources used .
* @ throws IOException Indicates that a resource could not be closed . */
@ Override public void close ( ) throws IOException { } }
|
if ( resultSet == null ) { return ; } try { resultSet . close ( ) ; } catch ( SQLException se ) { LOG . info ( "Inputformat ResultSet couldn't be closed - " + se . getMessage ( ) ) ; }
|
public class CommsServerByteBuffer {

    /**
     * Reads the data for a transaction.
     *
     * @param connectionObjectId id of the connection object the flow arrived on
     * @param linkState link level state holding the connection and transaction tables
     * @param txOptimized whether the flow uses the optimized transaction encoding
     * @return Returns the identifier for the transaction in the link level state transaction
     *         table (or the value CommsUtils.NO_TRANSACTION if there was no transaction).
     */
    public synchronized int getSITransactionId(int connectionObjectId, ServerLinkLevelState linkState, boolean txOptimized) {
        if (tc.isEntryEnabled()) SibTr.entry(tc, "getSITransactionId", new Object[] { "" + connectionObjectId, linkState, "" + txOptimized });
        final int transactionId;
        int transactionFlags = -1;
        if (txOptimized) {
            // Read the flags BIT32 from the data.
            transactionFlags = getInt();
            // Check transacted bit to verify whether this flow is transacted or not.
            if ((transactionFlags & CommsConstants.OPTIMIZED_TX_FLAGS_TRANSACTED_BIT) == 0) {
                transactionId = CommsConstants.NO_TRANSACTION;
            } else {
                int owningConvId = getInt();
                // Read the transaction identifier BIT32.
                transactionId = getInt();
                if (tc.isDebugEnabled()) {
                    SibTr.debug(this, tc, "Transaction Flags", "" + transactionFlags);
                    SibTr.debug(this, tc, "Owning Conversation Id", "" + owningConvId);
                    SibTr.debug(this, tc, "Transaction Id", "" + transactionId);
                }
                // Check the flags to determine if we need to create a new transaction or not.
                if ((transactionFlags & CommsConstants.OPTIMIZED_TX_FLAGS_CREATE_BIT) != 0) {
                    // Check the flags to determine if we are creating a local or global
                    // transaction.
                    if ((transactionFlags & CommsConstants.OPTIMIZED_TX_FLAGS_LOCAL_BIT) != 0) {
                        // local tran - determine if subordinates are allowed
                        final boolean allowSubordinates = (transactionFlags & CommsConstants.OPTIMIZED_TX_FLAGS_SUBORDINATES_ALLOWED) != 0;
                        try {
                            SIUncoordinatedTransaction uctran = linkState.getSICoreConnectionTable().get(owningConvId).createUncoordinatedTransaction(allowSubordinates);
                            linkState.getTransactionTable().addLocalTran(transactionId, owningConvId, uctran);
                        } catch (SIException e) {
                            // No FFDC Code Needed
                            // Creation failed: register a placeholder and poison the transaction.
                            if (tc.isEventEnabled()) SibTr.exception(tc, e);
                            linkState.getTransactionTable().addLocalTran(transactionId, owningConvId, IdToTransactionTable.INVALID_TRANSACTION);
                            linkState.getTransactionTable().markAsRollbackOnly(transactionId, e);
                        }
                    } else {
                        // global tran
                        SIXAResource xaRes = null;
                        XidProxy xidProxy = null;
                        try {
                            // Check the flags to determine if the XAResource needs end invoking on
                            // it before a new transaction can be started.
                            if ((transactionFlags & CommsConstants.OPTIMIZED_TX_END_PREVIOUS_BIT) != 0) {
                                int endFlags = getInt();
                                linkState.getTransactionTable().endOptimizedGlobalTransactionBranch(transactionId, endFlags);
                                xaRes = (SIXAResource) linkState.getTransactionTable().get(transactionId);
                            } else {
                                xaRes = linkState.getSICoreConnectionTable().get(owningConvId).getSIXAResource();
                            }
                            xidProxy = (XidProxy) getXid();
                            if (tc.isDebugEnabled()) SibTr.debug(tc, "xidProxy", xidProxy);
                            // Associate the new branch with the resource and record it.
                            xaRes.start(xidProxy, SIXAResource.TMNOFLAGS);
                            linkState.getTransactionTable().addGlobalTransactionBranch(transactionId, owningConvId, xaRes, xidProxy, true);
                        } catch (SIException e) {
                            // No FFDC Code Needed
                            if (tc.isEventEnabled()) SibTr.exception(tc, e);
                            linkState.getTransactionTable().addGlobalTransactionBranch(transactionId, owningConvId, IdToTransactionTable.INVALID_TRANSACTION, xidProxy, true);
                            linkState.getTransactionTable().markAsRollbackOnly(transactionId, e);
                        } catch (XAException e) {
                            // No FFDC Code Needed
                            if (tc.isEventEnabled()) SibTr.exception(tc, e);
                            linkState.getTransactionTable().addGlobalTransactionBranch(transactionId, owningConvId, IdToTransactionTable.INVALID_TRANSACTION, xidProxy, true);
                            linkState.getTransactionTable().markAsRollbackOnly(transactionId, e);
                        }
                    }
                }
            }
        } else {
            // Not an optimized transaction. Simply find its id.
            transactionId = getInt();
            if (tc.isDebugEnabled()) SibTr.debug(tc, "transactionId", "" + transactionId);
        }
        if (TraceComponent.isAnyTracingEnabled()) {
            Object commsTx = null;
            if (transactionId != CommsConstants.NO_TRANSACTION) commsTx = linkState.getTransactionTable().get(transactionId, true);
            CommsLightTrace.traceTransaction(tc, "GetTxnTrace", commsTx, transactionId, transactionFlags);
        }
        if (tc.isEntryEnabled()) SibTr.exit(tc, "getSITransactionId", "" + transactionId);
        return transactionId;
    }
}
|
public class SchemaUsageAnalyzer { /** * Extracts a numeric id from a string , which can be either a Wikidata
* entity URI or a short entity or property id .
* @ param idString
* @ param isUri
* @ return numeric id , or 0 if there was an error */
private Integer getNumId ( String idString , boolean isUri ) { } }
|
String numString ; if ( isUri ) { if ( ! idString . startsWith ( "http://www.wikidata.org/entity/" ) ) { return 0 ; } numString = idString . substring ( "http://www.wikidata.org/entity/Q" . length ( ) ) ; } else { numString = idString . substring ( 1 ) ; } return Integer . parseInt ( numString ) ;
|
public class FlowMeter {

    /**
     * Takes a sample of the request rates. Calculations are based on
     * differences in request counts since the last call to
     * <code>sample()</code>.
     *
     * @return an array of three <code>doubles</code>: total requests per
     *         second, successful requests per second, failed requests per
     *         second. If this is the first sample, all three rates will be
     *         reported as zero requests per second.
     */
    public synchronized double[] sample() {
        long[] currCounts = counter.sample();
        long now = System.currentTimeMillis();
        if (lastSampleMillis != 0) {
            long deltaTime = now - lastSampleMillis;
            // Guard against division by zero when sampled twice within the same millisecond.
            if (deltaTime == 0) return lastKnownRates;
            lastKnownRates[0] = rate(currCounts[0] - lastTotal, deltaTime);
            lastKnownRates[1] = rate(currCounts[1] - lastSuccesses, deltaTime);
            lastKnownRates[2] = rate(currCounts[2] - lastFailures, deltaTime);
        } else {
            // First sample: no baseline yet, report zero rates.
            lastKnownRates[0] = lastKnownRates[1] = lastKnownRates[2] = 0.0;
        }
        // Remember this sample as the baseline for the next call.
        lastTotal = currCounts[0];
        lastSuccesses = currCounts[1];
        lastFailures = currCounts[2];
        lastSampleMillis = now;
        return lastKnownRates;
    }
}
|
public class DescribeDBInstancesResult { /** * Detailed information about one or more DB instances .
* @ param dBInstances
* Detailed information about one or more DB instances . */
public void setDBInstances ( java . util . Collection < DBInstance > dBInstances ) { } }
|
if ( dBInstances == null ) { this . dBInstances = null ; return ; } this . dBInstances = new java . util . ArrayList < DBInstance > ( dBInstances ) ;
|
public class DbPro {

    /**
     * Batch save records using the "insert into ..." sql generated by the first record in
     * recordList. Ensure all the records can use the same sql as the first record.
     *
     * @param tableName the table name
     * @param recordList the records to insert; the first record defines the column set
     * @param batchSize number of statements to send per JDBC batch
     * @return the per-statement update counts, or an empty array for an empty input
     */
    public int[] batchSave(String tableName, List<Record> recordList, int batchSize) {
        if (recordList == null || recordList.size() == 0) return new int[0];
        Record record = recordList.get(0);
        Map<String, Object> cols = record.getColumns();
        int index = 0;
        StringBuilder columns = new StringBuilder();
        // the same as the iterator in Dialect.forDbSave() to ensure the order of the columns
        for (Entry<String, Object> e : cols.entrySet()) {
            if (config.dialect.isOracle()) {
                // support Oracle auto-increment primary keys: skip sequence ".nextval" columns
                Object value = e.getValue();
                if (value instanceof String && ((String) value).endsWith(".nextval")) {
                    continue;
                }
            }
            if (index++ > 0) {
                columns.append(',');
            }
            columns.append(e.getKey());
        }
        // forDbSave only needs the record to build the SQL text; keys/params are unused here.
        String[] pKeysNoUse = new String[0];
        StringBuilder sql = new StringBuilder();
        List<Object> parasNoUse = new ArrayList<Object>();
        config.dialect.forDbSave(tableName, pKeysNoUse, record, sql, parasNoUse);
        return batch(sql.toString(), columns.toString(), recordList, batchSize);
    }
}
|
public class Util { /** * Utility method to format algorithms name in Java like way
* @ param mode
* @ param padding
* @ return string name with the formatted algorithm */
public static String formatter ( Mode mode , Padding padding ) { } }
|
return String . format ( "%s/%s" , mode , padding ) ;
|
public class LdapConsentRepository { /** * Modifies the consent decisions attribute on the entry .
* @ param newConsent new set of consent decisions
* @ param entry entry of consent decisions
* @ return true / false */
private boolean executeModifyOperation ( final Set < String > newConsent , final LdapEntry entry ) { } }
|
val attrMap = new HashMap < String , Set < String > > ( ) ; attrMap . put ( this . ldap . getConsentAttributeName ( ) , newConsent ) ; LOGGER . debug ( "Storing consent decisions [{}] at LDAP attribute [{}] for [{}]" , newConsent , attrMap . keySet ( ) , entry . getDn ( ) ) ; return LdapUtils . executeModifyOperation ( entry . getDn ( ) , this . connectionFactory , CollectionUtils . wrap ( attrMap ) ) ;
|
public class Vectors { /** * Creates a sum vector accumulator that calculates the sum of all elements in the vector .
* @ param neutral the neutral value
* @ return a sum accumulator */
public static VectorAccumulator asSumAccumulator ( final double neutral ) { } }
|
return new VectorAccumulator ( ) { private BigDecimal result = BigDecimal . valueOf ( neutral ) ; @ Override public void update ( int i , double value ) { result = result . add ( BigDecimal . valueOf ( value ) ) ; } @ Override public double accumulate ( ) { double value = result . setScale ( Vectors . ROUND_FACTOR , RoundingMode . CEILING ) . doubleValue ( ) ; result = BigDecimal . valueOf ( neutral ) ; return value ; } } ;
|
public class QueryBuilder { /** * Creates a new { @ code TRUNCATE } query .
* < p > This is a shortcut for { @ link # truncate ( CqlIdentifier , CqlIdentifier )
* truncate ( CqlIdentifier . fromCql ( keyspace ) , CqlIdentifier . fromCql ( table ) ) } .
* @ param keyspace the name of the keyspace to use .
* @ param table the name of the table to truncate .
* @ return the truncation query . */
public static Truncate truncate ( @ Nullable String keyspace , @ NonNull String table ) { } }
|
return truncate ( keyspace == null ? null : CqlIdentifier . fromCql ( keyspace ) , CqlIdentifier . fromCql ( table ) ) ;
|
public class JsfFaceletScannerPlugin { /** * Try to find an existing { @ link JsfFaceletDescriptor } with the given
* parameters .
* @ param templateFqn
* full qualified name of the including file
* @ param path
* the found path to the included file
* @ param context
* The scanner context
* @ return an existing { @ link JsfFaceletDescriptor } or a new one if no
* descriptor exists . */
private JsfFaceletDescriptor findJsfTemplateDescriptor ( String templateFqn , String path , ScannerContext context ) { } }
|
String includedFile = absolutifyFilePath ( path , templateFqn ) ; return getJsfTemplateDescriptor ( includedFile , context ) ;
|
public class Util { /** * delegate to your to an ExceptionAction and wraps checked exceptions in RuntimExceptions , leaving unchecked exceptions alone .
* @ return A
* @ throws Either Error or RuntimeException . Error on Errors in doAction and RuntimeException if doError throws a RuntimeException or an Exception . In the latter case
* the RuntimeException will contain the original Exception as its cause */
public static < A > A execute ( ExceptionAction < A > action ) { } }
|
try { return action . doAction ( ) ; } catch ( RuntimeException e ) { throw e ; } catch ( Error e ) { throw e ; } catch ( Throwable e ) { throw new RuntimeException ( e ) ; }
|
public class SRTUpgradeOutputStream31 { /** * @ see javax . servlet . ServletOutputStream # println ( boolean ) */
public void println ( boolean b ) throws IOException { } }
|
if ( this . _listener != null && ! checkIfCalledFromWLonError ( ) ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "non blocking println boolean , WriteListener enabled: " + this . _listener ) ; _outHelper . println_NonBlocking ( Boolean . toString ( b ) ) ; } else { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "println boolean" ) ; String value = Boolean . toString ( b ) ; synchronized ( this ) { this . write ( value . getBytes ( ) , 0 , value . length ( ) ) ; this . write ( CRLF , 0 , 2 ) ; } }
|
public class CallbackThread { @ Override public void run ( ) { } }
|
// The AsyncCallObject performs the whole reply handling itself; any failure
// is reported to stderr instead of escaping the thread.
try {
    aco.manage_reply(0);
} catch (final Exception ex) {
    System.err.println(ex);
}
|
public class SamlIdPObjectEncrypter { /** * Encode encrypted id .
* @ param samlObject the saml object
* @ param service the service
* @ param adaptor the adaptor
* @ return the encrypted id */
@ SneakyThrows public EncryptedID encode ( final NameID samlObject , final SamlRegisteredService service , final SamlRegisteredServiceServiceProviderMetadataFacade adaptor ) { } }
|
val encrypter = buildEncrypterForSamlObject ( samlObject , service , adaptor ) ; return encrypter . encrypt ( samlObject ) ;
|
public class TrackerClient { /** * Fire the new peer discovery event to all listeners .
* @ param peers The list of peers discovered . */
protected void fireDiscoveredPeersEvent ( List < Peer > peers , String hexInfoHash ) { } }
|
// Fan the discovery event out to every registered listener.
for (final AnnounceResponseListener observer : this.listeners) {
    observer.handleDiscoveredPeers(peers, hexInfoHash);
}
|
public class Quaterniond { /** * Set this quaternion to be a representation of the supplied axis and
* angle ( in radians ) .
* @ param axis
* the rotation axis
* @ param angle
* the angle in radians
* @ return this */
public Quaterniond fromAxisAngleRad ( Vector3dc axis , double angle ) { } }
|
return fromAxisAngleRad ( axis . x ( ) , axis . y ( ) , axis . z ( ) , angle ) ;
|
public class Monitoring { /** * < pre >
* Monitoring configurations for sending metrics to the producer project .
* There can be multiple producer destinations . A monitored resouce type may
* appear in multiple monitoring destinations if different aggregations are
* needed for different sets of metrics associated with that monitored
* resource type . A monitored resource and metric pair may only be used once
* in the Monitoring configuration .
* < / pre >
* < code > repeated . google . api . Monitoring . MonitoringDestination producer _ destinations = 1 ; < / code > */
public com . google . api . Monitoring . MonitoringDestinationOrBuilder getProducerDestinationsOrBuilder ( int index ) { } }
|
// Generated protobuf accessor: returns the element at 'index' of the backing
// repeated field (throws IndexOutOfBoundsException for invalid indices).
return producerDestinations_ . get ( index ) ;
|
public class ArrayTrie { @ Override public V get ( String s , int offset , int len ) { } }
|
// Walks the trie one character at a time starting from the root row (t == 0).
// Characters whose low-7-bit slot in __lookup is populated follow the dense
// _rowIndex table; any other character falls back to the node's sparse
// _bigIndex table. A next-row value of 0 means "no such child", i.e. the key
// is absent, so null is returned immediately.
// NOTE(review): _value[t] may itself be null when the key is only a prefix of
// stored entries - callers appear to treat null uniformly as "not found".
int t = 0 ; for ( int i = 0 ; i < len ; i ++ ) { char c = s . charAt ( offset + i ) ; int index = __lookup [ c & 0x7f ] ; if ( index >= 0 ) { int idx = t * ROW_SIZE + index ; t = _rowIndex [ idx ] ; if ( t == 0 ) return null ; } else { char [ ] big = _bigIndex == null ? null : _bigIndex [ t ] ; if ( big == null ) return null ; t = big [ c ] ; if ( t == 0 ) return null ; } } return _value [ t ] ;
|
public class Messages { /** * Create constant name .
* @ param state STATE _ UNCHANGED , STATE _ CHANGED , STATE _ NEW or STATE _ DELETED .
* @ return cconstanname as String */
public static String getStateKey ( CmsResourceState state ) { } }
|
StringBuffer sb = new StringBuffer ( STATE_PREFIX ) ; sb . append ( state ) ; sb . append ( STATE_POSTFIX ) ; return sb . toString ( ) ;
|
public class ListTagsForStreamRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ListTagsForStreamRequest listTagsForStreamRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
// Marshals the three request fields (stream name, exclusive start tag key,
// limit) through the protocol marshaller. A null request is rejected up front;
// any marshalling failure is rewrapped as SdkClientException with the original
// exception preserved as the cause.
if ( listTagsForStreamRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( listTagsForStreamRequest . getStreamName ( ) , STREAMNAME_BINDING ) ; protocolMarshaller . marshall ( listTagsForStreamRequest . getExclusiveStartTagKey ( ) , EXCLUSIVESTARTTAGKEY_BINDING ) ; protocolMarshaller . marshall ( listTagsForStreamRequest . getLimit ( ) , LIMIT_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class HourRanges { /** * Adds some hour ranges to this instance and returns a new one . < br >
* < br >
* It is only allowed to call this method if the hour ranges represents only one day . This means a value like ' 18:00-03:00 ' will lead to
* an error . To avoid this , call the { @ link # normalize ( ) } function before this one and pass the result per day as an argument to this
* method .
* @ param other
* Ranges to add .
* @ return New instance with added times . */
@ NotNull public final HourRanges add ( @ NotNull final HourRanges other ) { } }
|
Contract.requireArgNotNull("other", other);
// Both operands must represent a single day (see method javadoc).
ensureSingleDayOnly("this", this);
ensureSingleDayOnly("other", other);
// Compute the union on the minute-resolution bit sets and convert back.
final BitSet merged = this.toMinutes();
merged.or(other.toMinutes());
return HourRanges.valueOf(merged);
|
public class MapBuilder { /** * Returns a new MapBuilder with the given entry . */
public static < K , V > MapBuilder < K , V > of ( K key , V value ) { } }
|
return new MapBuilder < K , V > ( ) . put ( key , value ) ;
|
public class ScanIterator { /** * Sequentially iterate over keys in the keyspace . This method uses { @ code SCAN } to perform an iterative scan .
* @ param commands the commands interface , must not be { @ literal null } .
* @ param scanArgs the scan arguments , must not be { @ literal null } .
* @ param < K > Key type .
* @ param < V > Value type .
* @ return a new { @ link ScanIterator } . */
public static < K , V > ScanIterator < K > scan ( RedisKeyCommands < K , V > commands , ScanArgs scanArgs ) { } }
|
// Validates the arguments, then delegates to the Optional-based overload.
LettuceAssert . notNull ( scanArgs , "ScanArgs must not be null" ) ; return scan ( commands , Optional . of ( scanArgs ) ) ;
|
public class XMLDocumentCache { /** * Retrieve the currently cached value for the given document . */
public static Result get ( XmlFileModel key ) { } }
|
// Looks up the cached parse result for the file. Outcomes:
//  - no cache entry            -> Result(false, null)
//  - cached parse failure      -> Result(true, null)
//  - cached document available -> Result(false, document)
//  - entry present but document reference was cleared -> logs a cache miss
//    and returns Result(false, null), so the caller re-parses.
// NOTE(review): the local 'result' is never used and could be removed.
String cacheKey = getKey ( key ) ; Result result = null ; CacheDocument reference = map . get ( cacheKey ) ; if ( reference == null ) return new Result ( false , null ) ; if ( reference . parseFailure ) return new Result ( true , null ) ; Document document = reference . getDocument ( ) ; if ( document == null ) LOG . info ( "Cache miss on XML document: " + cacheKey ) ; return new Result ( false , document ) ;
|
public class SwaggerBuilder { /** * Register an Operation ' s parameters .
* @ param swagger
* @ param operation
* @ param route
* @ param method
* @ return the registered Swagger URI for the operation */
protected String registerParameters ( Swagger swagger , Operation operation , Route route , Method method ) { } }
|
// Registers every parameter of the route's handler method on the Swagger
// Operation. First all URI template parameters become required PathParameters
// (and are remembered in 'pathParameterPlaceholders' for the final URI
// rewrite). Then each remaining method parameter is classified by annotation:
// @Local/@Session/@Auth are skipped, @Body becomes a BodyParameter (array,
// collection, domain-model ref, or primitive schema), @Header a
// HeaderParameter, @Form (or a FileItem-typed parameter) a FormParameter,
// and anything else a QueryParameter.
Map < String , Object > pathParameterPlaceholders = new HashMap < > ( ) ; for ( String uriParameterName : getUriParameterNames ( route . getUriPattern ( ) ) ) { // path parameters are required
PathParameter pathParameter = new PathParameter ( ) ; pathParameter . setName ( uriParameterName ) ; setPropertyType ( swagger , pathParameter , method ) ; pathParameter . setRequired ( true ) ; operation . addParameter ( pathParameter ) ; pathParameterPlaceholders . put ( uriParameterName , "{" + uriParameterName + "}" ) ; } // identify body , header , query , & form parameters
for ( Parameter methodParameter : method . getParameters ( ) ) { String methodParameterName = getParameterName ( methodParameter ) ; if ( pathParameterPlaceholders . containsKey ( methodParameterName ) ) { // path parameter already accounted for
continue ; } if ( methodParameter . isAnnotationPresent ( Local . class ) ) { // ignore parameter
continue ; } if ( methodParameter . isAnnotationPresent ( Session . class ) ) { // ignore parameter
continue ; } if ( methodParameter . isAnnotationPresent ( Auth . class ) ) { // ignore parameter
continue ; } if ( methodParameter . isAnnotationPresent ( Body . class ) ) { // BODY
BodyParameter bodyParameter = new BodyParameter ( ) ; bodyParameter . setName ( methodParameterName ) ; bodyParameter . setDescription ( getDescription ( methodParameter ) ) ; bodyParameter . setRequired ( true ) ; if ( methodParameter . getType ( ) . isArray ( ) ) { // ARRAY [ ]
Property property = getSwaggerProperty ( swagger , methodParameter . getType ( ) . getComponentType ( ) ) ; ArrayModel arrayModel = new ArrayModel ( ) ; arrayModel . setItems ( property ) ; bodyParameter . setSchema ( arrayModel ) ; } else if ( Collection . class . isAssignableFrom ( methodParameter . getType ( ) ) ) { // COLLECTION
Class < ? > componentClass = ClassUtil . getParameterGenericType ( method , methodParameter ) ; Property property = getSwaggerProperty ( swagger , componentClass ) ; ArrayModel arrayModel = new ArrayModel ( ) ; arrayModel . setItems ( property ) ; bodyParameter . setSchema ( arrayModel ) ; } else { // OBJECT
Property property = getSwaggerProperty ( swagger , methodParameter . getType ( ) ) ; if ( property instanceof RefProperty ) { // Domain Model
RefProperty ref = ( RefProperty ) property ; bodyParameter . setSchema ( new RefModel ( ref . getSimpleRef ( ) ) ) ; } else { // Primitive Type
ModelImpl model = new ModelImpl ( ) ; model . setType ( property . getType ( ) ) ; model . setFormat ( property . getFormat ( ) ) ; bodyParameter . setSchema ( model ) ; } } operation . addParameter ( bodyParameter ) ; } else if ( methodParameter . isAnnotationPresent ( Header . class ) ) { // HEADER
Header header = methodParameter . getAnnotation ( Header . class ) ; HeaderParameter headerParameter = new HeaderParameter ( ) ; if ( Strings . isNullOrEmpty ( header . value ( ) ) ) { headerParameter . setName ( methodParameterName ) ; } else { headerParameter . setName ( header . value ( ) ) ; } headerParameter . setDescription ( getDescription ( methodParameter ) ) ; setPropertyType ( swagger , headerParameter , method ) ; operation . addParameter ( headerParameter ) ; } else if ( methodParameter . isAnnotationPresent ( Form . class ) || FileItem . class == methodParameter . getType ( ) ) { // FORM
FormParameter formParameter = new FormParameter ( ) ; formParameter . setName ( methodParameterName ) ; formParameter . setDescription ( getDescription ( methodParameter ) ) ; setPropertyType ( swagger , formParameter , method ) ; operation . addParameter ( formParameter ) ; if ( FileItem . class == methodParameter . getType ( ) ) { // if we see a FileItem , then this MUST be a multipart POST
// NOTE(review): the else-branch below calls operation.getConsumes()
// without a null check - confirm Operation initializes 'consumes'.
operation . setConsumes ( Arrays . asList ( HttpConstants . ContentType . MULTIPART_FORM_DATA ) ) ; } else if ( ! operation . getConsumes ( ) . contains ( HttpConstants . ContentType . MULTIPART_FORM_DATA ) ) { // only override consumes if this is NOT a multipart POST
operation . setConsumes ( Arrays . asList ( HttpConstants . ContentType . APPLICATION_FORM_URLENCODED ) ) ; } } else { // QUERY
QueryParameter queryParameter = new QueryParameter ( ) ; queryParameter . setName ( methodParameterName ) ; queryParameter . setDescription ( getDescription ( methodParameter ) ) ; setPropertyType ( swagger , queryParameter , method ) ; operation . addParameter ( queryParameter ) ; } } // we need to rewrite the uripattern without regex for Swagger
// e . g . / employee / { id : 0-9 + } must be rewritten as / employee / { id }
String swaggerUri = router . uriFor ( route . getUriPattern ( ) , pathParameterPlaceholders ) ; return swaggerUri ;
|
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getCFC ( ) { } }
|
// Generated EMF accessor: lazily resolves the CFC EClass from the registered
// package by positional index. The magic index 226 is fixed by the code
// generator and must match the package's classifier ordering.
if ( cfcEClass == null ) { cfcEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 226 ) ; } return cfcEClass ;
|
public class AbstractColumnFamilyOutputFormat { /** * Fills the deprecated OutputFormat interface for streaming . */
@ Deprecated public void checkOutputSpecs ( org . apache . hadoop . fs . FileSystem filesystem , org . apache . hadoop . mapred . JobConf job ) throws IOException { } }
|
// Deprecated mapred-style entry point: the FileSystem argument is ignored and
// the mapreduce-style overload performs the actual validation on the JobConf.
checkOutputSpecs ( job ) ;
|
public class ModuleMetadataDatabaseImpl { /** * { @ inheritDoc } */
public List < MetadataDatabase > getMetadadaDatabasesSharedFolders ( ResponseListNetworkSharedFolder responseNetworkSharedFolders ) { } }
|
// Collect one metadata database per reachable network share; shares whose
// database cannot be fetched are logged and skipped.
final List<MetadataDatabase> databases = new ArrayList<MetadataDatabase>();
if (responseNetworkSharedFolders == null) {
    // No response at all: an empty list, never null.
    return databases;
}
for (final ObjectNetworkShare share : responseNetworkSharedFolders.getNetworkShares()) {
    try {
        final MetadataDatabase database = getDatabase(share.getUrl());
        databases.add(database);
        logger.debug(database.toString());
    } catch (TheDavidBoxClientException e) {
        logger.warn("Not found metadata database for network share url: " + share.getUrl());
    }
}
return databases;
|
public class PathUtils { /** * Copies a directory .
* NOTE : This method is not thread - safe .
* Most of the implementation is thanks to
* http : / / stackoverflow . com / questions / 17641706 / how - to - copy - a - directory - with - its - attributes - permissions - from - one
* - location - to - ano / 18691793#18691793
* @ param sourceDirectory
* the directory to copy from
* @ param targetDirectory
* the directory to copy into
* @ throws IOException
* if an I / O error occurs */
public static void copyDirectory ( Path sourceDirectory , Path targetDirectory ) throws IOException { } }
|
// Delegates to the three-argument overload with an empty exclusion set, i.e.
// everything under sourceDirectory is copied.
copyDirectory ( sourceDirectory , targetDirectory , emptySet ( ) ) ;
|
public class Log4JULogger { /** * { @ inheritDoc } */
public void debug ( final Object parameterizedMsg , final Object param1 ) { } }
|
// Guard avoids the cost of message formatting when DEBUG is disabled; the
// SLF4J-style "{}" pattern in parameterizedMsg is expanded with param1.
if ( logger . isDebugEnabled ( ) ) { logger . debug ( MessageFormatter . format ( parameterizedMsg . toString ( ) , param1 ) ) ; }
|
public class BundleAdjustmentMetricResidualFunction { /** * projection from 3D coordinates */
private void project3 ( double [ ] output ) { } }
|
// Fills 'output' with reprojection residuals (predicted - observed, x then y,
// two slots per observation) for every observation in every view. General
// scene points are processed first, then points attached to rigid objects,
// which require an extra object-to-world transform before the world-to-view
// transform. 'observationIndex' runs continuously across both passes and all
// views, so the output layout matches the observation ordering.
int observationIndex = 0 ; for ( int viewIndex = 0 ; viewIndex < structure . views . length ; viewIndex ++ ) { SceneStructureMetric . View view = structure . views [ viewIndex ] ; SceneStructureMetric . Camera camera = structure . cameras [ view . camera ] ; // = = = = = Project General Points in this View
{ SceneObservations . View obsView = observations . views [ viewIndex ] ; for ( int i = 0 ; i < obsView . size ( ) ; i ++ ) { obsView . get ( i , observedPixel ) ; SceneStructureMetric . Point worldPt = structure . points [ observedPixel . index ] ; worldPt . get ( p3 ) ; SePointOps_F64 . transform ( view . worldToView , p3 , cameraPt ) ; camera . model . project ( cameraPt . x , cameraPt . y , cameraPt . z , predictedPixel ) ; int outputIndex = observationIndex * 2 ; output [ outputIndex ] = predictedPixel . x - observedPixel . x ; output [ outputIndex + 1 ] = predictedPixel . y - observedPixel . y ; observationIndex ++ ; } } // = = = = = Project Rigid Object Points in this View
if ( observations . viewsRigid != null ) { SceneObservations . View obsView = observations . viewsRigid [ viewIndex ] ; for ( int i = 0 ; i < obsView . size ( ) ; i ++ ) { obsView . get ( i , observedPixel ) ; // Use lookup table to figure out which rigid object it belongs to
int rigidIndex = structure . lookupRigid [ observedPixel . index ] ; SceneStructureMetric . Rigid rigid = structure . rigids [ rigidIndex ] ; // Compute the point ' s index on the rigid object
int pointIndex = observedPixel . index - rigid . indexFirst ; // Load the 3D location of point on the rigid body
SceneStructureMetric . Point objectPt = rigid . points [ pointIndex ] ; objectPt . get ( p3 ) ; // Tranform to world frame and from world to camera
SePointOps_F64 . transform ( rigid . objectToWorld , p3 , worldPt ) ; SePointOps_F64 . transform ( view . worldToView , worldPt , cameraPt ) ; // Project and compute residual
camera . model . project ( cameraPt . x , cameraPt . y , cameraPt . z , predictedPixel ) ; int outputIndex = observationIndex * 2 ; output [ outputIndex ] = predictedPixel . x - observedPixel . x ; output [ outputIndex + 1 ] = predictedPixel . y - observedPixel . y ; observationIndex ++ ; } } }
|
public class Primitive { /** * Unwrap Primitive wrappers to their java . lang wrapper values .
* e . g . Primitive ( 42 ) becomes Integer ( 42)
* @ see # unwrap ( Object ) */
public static Object [ ] unwrap ( Object [ ] args ) { } }
|
// Element-wise unwrap: each Primitive wrapper in the array is replaced with
// its java.lang value; a null input array yields null.
if (args == null) {
    return null;
}
final int count = args.length;
final Object[] unwrapped = new Object[count];
for (int i = 0; i < count; i++) {
    unwrapped[i] = unwrap(args[i]);
}
return unwrapped;
|
public class SQLiteConnection { /** * Collects statistics about database connection memory usage .
* @ param dbStatsList The list to populate . */
void collectDbStats ( ArrayList < com . couchbase . lite . internal . database . sqlite . SQLiteDebug . DbStats > dbStatsList ) { } }
|
// Appends one DbStats entry for the main database: lookaside allocations come
// from the native connection handle; page count/size come from PRAGMAs and
// stay 0 if either PRAGMA fails (the failure is deliberately swallowed so
// stats collection never aborts).
// Get information about the main database .
int lookaside = nativeGetDbLookaside ( mConnectionPtr ) ; long pageCount = 0 ; long pageSize = 0 ; try { pageCount = executeForLong ( "PRAGMA page_count;" , null , null ) ; pageSize = executeForLong ( "PRAGMA page_size;" , null , null ) ; } catch ( com . couchbase . lite . internal . database . sqlite . exception . SQLiteException ex ) { // Ignore .
} dbStatsList . add ( getMainDbStatsUnsafe ( lookaside , pageCount , pageSize ) ) ;
|
public class HelpCommand { /** * { @ inheritDoc } */
@ Override int execute ( OptionsAndArgs pOpts , Object pVm , VirtualMachineHandler pHandler ) throws InvocationTargetException , NoSuchMethodException , IllegalAccessException { } }
|
// Prints the command-line usage text and reports success (exit code 0);
// the VM/handler arguments are intentionally unused for the help command.
printUsage ( ) ; return 0 ;
|
public class JavaSoundPlayer { /** * Use the gain control to implement volume . */
protected static void adjustVolume ( Line line , float vol ) { } }
|
// Maps a linear volume in [0, 1] onto the line's MASTER_GAIN control.
// Gain is specified in decibels (a logarithmic scale): volume 1.0 maps to
// 0 dB (sample unchanged), volume 0 maps to the control's minimum (silence).
FloatControl control = (FloatControl) line.getControl(FloatControl.Type.MASTER_GAIN);
float gain;
if (vol == 0f) {
    gain = control.getMinimum();
} else {
    // 20 * log10(vol): Math.log10 replaces the hand-rolled log(x)/log(10).
    // NOTE(review): vol < 0 or NaN would still produce a NaN gain - assumes
    // callers pass vol in [0, 1]; confirm before adding clamping.
    gain = (float) (20.0 * Math.log10(vol));
}
control.setValue(gain);
|
public class FileHelper { /** * Returns < code > true < / code > if the first file is newer than the second file .
* Returns < code > true < / code > if the first file exists and the second file does
* not exist . Returns < code > false < / code > if the first file is older than the
* second file . Returns < code > false < / code > if the first file does not exists
* but the second does . Returns < code > false < / code > if none of the files exist .
* @ param aFile1
* First file . May not be < code > null < / code > .
* @ param aFile2
* Second file . May not be < code > null < / code > .
* @ return < code > true < / code > if the first file is newer than the second file ,
* < code > false < / code > otherwise . */
public static boolean isFileNewer ( @ Nonnull final File aFile1 , @ Nonnull final File aFile2 ) { } }
|
ValueEnforcer.notNull(aFile1, "File1");
ValueEnforcer.notNull(aFile2, "aFile2");
// A file is never newer than itself.
if (aFile1.equals(aFile2)) {
    return false;
}
if (!aFile1.exists()) {
    // Missing first file can never be newer (covers "neither exists" too).
    return false;
}
if (!aFile2.exists()) {
    // First exists, second does not -> first counts as newer.
    return true;
}
// Both exist: compare last-modification timestamps.
return aFile1.lastModified() > aFile2.lastModified();
|
public class AbstractMonteCarloProduct { /** * This method returns the value under shifted market data ( or model parameters ) .
* In its default implementation it does bump ( creating a new model ) and revalue .
* Override the way the new model is created , to implemented improved techniques ( proxy scheme , re - calibration ) .
* @ param model The model used to price the product , except for the market data to modify
* @ param dataModified The new market data object to use ( could be of different types )
* @ return The values of the product .
* @ throws net . finmath . exception . CalculationException Thrown if the valuation fails , specific cause may be available via the < code > cause ( ) < / code > method . */
public Map < String , Object > getValuesForModifiedData ( MonteCarloSimulationInterface model , Map < String , Object > dataModified ) throws CalculationException { } }
|
// Delegates to the overload with a leading 0.0 argument - presumably the
// evaluation time; confirm against the three-argument overload's signature.
return getValuesForModifiedData ( 0.0 , model , dataModified ) ;
|
public class StorableIndex { /** * Returns a new array with all the properties in it , with directions
* folded in . */
@ SuppressWarnings ( "unchecked" ) public OrderedProperty < S > [ ] getOrderedProperties ( ) { } }
|
// Pair each property with its direction into a fresh array; order is
// preserved, only the iteration direction differs from the original.
final OrderedProperty<S>[] ordered = new OrderedProperty[mProperties.length];
for (int i = 0; i < mProperties.length; i++) {
    ordered[i] = OrderedProperty.get(mProperties[i], mDirections[i]);
}
return ordered;
|
public class MapTileApproximater { /** * Approximate a tile from a lower zoom level
* @ since 6.0.0
* @ param pProvider Source tile provider
* @ param pMapTileIndex Destination tile , for the same place on the planet as the source , but on a higher zoom
* @ param pZoomDiff Zoom level difference between the destination and the source ; strictly positive
* @ return */
public static Bitmap approximateTileFromLowerZoom ( final MapTileModuleProviderBase pProvider , final long pMapTileIndex , final int pZoomDiff ) { } }
|
// Derives the source tile index by right-shifting the x/y coordinates by the
// zoom difference, loads that tile, and scales it up via the bitmap overload.
// Every failure mode - non-positive zoom diff, source zoom outside the
// provider's range, non-bitmap drawable, or any loader exception - returns
// null deliberately; the caller treats null as "no approximation available".
if ( pZoomDiff <= 0 ) { return null ; } final int srcZoomLevel = MapTileIndex . getZoom ( pMapTileIndex ) - pZoomDiff ; if ( srcZoomLevel < pProvider . getMinimumZoomLevel ( ) ) { return null ; } if ( srcZoomLevel > pProvider . getMaximumZoomLevel ( ) ) { return null ; } final long srcTile = MapTileIndex . getTileIndex ( srcZoomLevel , MapTileIndex . getX ( pMapTileIndex ) >> pZoomDiff , MapTileIndex . getY ( pMapTileIndex ) >> pZoomDiff ) ; try { final Drawable srcDrawable = pProvider . getTileLoader ( ) . loadTile ( srcTile ) ; if ( ! ( srcDrawable instanceof BitmapDrawable ) ) { return null ; } return approximateTileFromLowerZoom ( ( BitmapDrawable ) srcDrawable , pMapTileIndex , pZoomDiff ) ; } catch ( Exception e ) { return null ; }
|
public class CommerceRegionLocalServiceWrapper { /** * Returns a range of all the commerce regions .
* Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link com . liferay . portal . kernel . dao . orm . QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link com . liferay . portal . kernel . dao . orm . QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link com . liferay . commerce . model . impl . CommerceRegionModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order .
* @ param start the lower bound of the range of commerce regions
* @ param end the upper bound of the range of commerce regions ( not inclusive )
* @ return the range of commerce regions */
@ Override public java . util . List < com . liferay . commerce . model . CommerceRegion > getCommerceRegions ( int start , int end ) { } }
|
// Service-wrapper boilerplate: forwards the paginated query straight to the
// wrapped local service.
return _commerceRegionLocalService . getCommerceRegions ( start , end ) ;
|
public class DateUtils { /** * Converts an RFC3339 formatted Date String to a Java Date RFC3339 format : yyyy - MM - dd HH : mm : ss
* @ param rfc3339FormattedDate RFC3339 formatted Date
* @ return an { @ link Date } object
* @ throws InvalidFormatException the RFC3339 formatted Date is invalid or cannot be parsed .
* @ see < a href = " https : / / tools . ietf . org / html / rfc3339 " > The Internet Society - RFC 3339 < / a > */
public static Date fromRfc3339DateString ( String rfc3339FormattedDate ) throws InvalidFormatException { } }
|
SimpleDateFormat rfc3339DateFormat = new SimpleDateFormat ( "yyyy-MM-dd HH:mm:ss" ) ; try { return rfc3339DateFormat . parse ( rfc3339FormattedDate ) ; } catch ( ParseException e ) { throw new InvalidFormatException ( "Error parsing as date" , rfc3339FormattedDate , Date . class ) ; }
|
public class FileUtils { /** * Guava predicates and functions */
public static Predicate < File > isDirectoryPredicate ( ) { } }
|
// Predicate that is true exactly for files which are directories.
return new Predicate<File>() {
    @Override
    public boolean apply(final File candidate) {
        return candidate.isDirectory();
    }
};
|
public class RotationAxisAligner { /** * Returns a vector perpendicular to the principal rotation vector
* for the alignment of structures in the xy - plane
* @ return reference vector */
private void calcReferenceVector ( ) { } }
|
referenceVector = null ; if ( rotationGroup . getPointGroup ( ) . startsWith ( "C" ) ) { referenceVector = getReferenceAxisCylic ( ) ; } else if ( rotationGroup . getPointGroup ( ) . startsWith ( "D" ) ) { referenceVector = getReferenceAxisDihedral ( ) ; } else if ( rotationGroup . getPointGroup ( ) . equals ( "T" ) ) { referenceVector = getReferenceAxisTetrahedral ( ) ; } else if ( rotationGroup . getPointGroup ( ) . equals ( "O" ) ) { referenceVector = getReferenceAxisOctahedral ( ) ; } else if ( rotationGroup . getPointGroup ( ) . equals ( "I" ) ) { referenceVector = getReferenceAxisIcosahedral ( ) ; } else if ( rotationGroup . getPointGroup ( ) . equals ( "Helical" ) ) { // TODO what should the reference vector be ? ?
referenceVector = getReferenceAxisCylic ( ) ; } if ( referenceVector == null ) { logger . warn ( "no reference vector found. Using y-axis." ) ; referenceVector = new Vector3d ( Y_AXIS ) ; } // make sure reference vector is perpendicular principal roation vector
referenceVector = orthogonalize ( principalRotationVector , referenceVector ) ;
|
public class PublicCardUrl { /** * Get Resource Url for Update
* @ param cardId Unique identifier of the card associated with the customer account billing contact .
* @ param responseFields Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object . This parameter should only be used to retrieve data . Attempting to update data using this parameter may cause data loss .
* @ return String Resource Url */
public static MozuUrl updateUrl ( String cardId , String responseFields ) { } }
|
// Fill the URL template's placeholders and target the PCI pod.
final UrlFormatter formatter =
        new UrlFormatter("/payments/commerce/payments/cards/{cardId}?responseFields={responseFields}");
formatter.formatUrl("cardId", cardId);
formatter.formatUrl("responseFields", responseFields);
return new MozuUrl(formatter.getResourceUrl(), MozuUrl.UrlLocation.PCI_POD);
|
public class StreamingConnectionFactory { /** * Sets the connect timeout in the specified time unit .
* @ param connectTimeout the connectWait to set
* @ param unit the time unit to set */
public void setConnectTimeout ( long connectTimeout , TimeUnit unit ) { } }
|
this . connectTimeout = Duration . ofMillis ( unit . toMillis ( connectTimeout ) ) ;
|
public class MOEADD { /** * Calculate the norm of the vector */
public double norm_vector ( double [ ] z ) { } }
|
// Euclidean norm over the first numberOfObjectives entries of z (note: the
// problem's objective count, not z.length, bounds the loop).
final int objectives = problem.getNumberOfObjectives();
double sumOfSquares = 0.0;
for (int i = 0; i < objectives; i++) {
    sumOfSquares += z[i] * z[i];
}
return Math.sqrt(sumOfSquares);
|
public class WebAppConfiguratorHelper { /** * To configure JMS Destinations
* @ param jmsDestinations */
private void configureJMSDestinations ( List < JMSDestination > jmsDestinations ) { } }
|
// Merges <jmsDestination> definitions from web.xml / web-fragment.xml into
// the shared config-item map, skipping unnamed entries. Duplicate-name rules:
// web.xml silently wins over a fragment (debug-traced); two fragments with
// the same name and differing values produce a conflict error message.
// NOTE(review): the map key and both messages use
// JNDIEnvironmentRefType.JMSConnectionFactory.getXMLElementName() even though
// this method handles JMS *destinations* (addRef correctly uses
// JMSDestination). This looks like a copy-paste from the connection-factory
// variant - confirm whether JMSDestination.getXMLElementName() was intended,
// since the current key would collide with actual connection-factory entries.
Map < String , ConfigItem < JMSDestination > > jmsDestinationConfigItemMap = configurator . getConfigItemMap ( JNDIEnvironmentRefType . JMSConnectionFactory . getXMLElementName ( ) ) ; for ( JMSDestination jmsDestination : jmsDestinations ) { String name = jmsDestination . getName ( ) ; if ( name == null ) { continue ; } ConfigItem < JMSDestination > existedCF = jmsDestinationConfigItemMap . get ( name ) ; if ( existedCF == null ) { jmsDestinationConfigItemMap . put ( name , createConfigItem ( jmsDestination , JMS_DESTINATION_COMPARATOR ) ) ; webAppConfiguration . addRef ( JNDIEnvironmentRefType . JMSDestination , jmsDestination ) ; } else { if ( existedCF . getSource ( ) == ConfigSource . WEB_XML && configurator . getConfigSource ( ) == ConfigSource . WEB_FRAGMENT ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "{0}.{1} with value {2} is configured in web.xml, the value {3} from web-fragment.xml in {4} is ignored" , JNDIEnvironmentRefType . JMSConnectionFactory . getXMLElementName ( ) , "name" , existedCF . getValue ( ) , name , configurator . getLibraryURI ( ) ) ; } } else if ( existedCF . getSource ( ) == ConfigSource . WEB_FRAGMENT && configurator . getConfigSource ( ) == ConfigSource . WEB_FRAGMENT && ! existedCF . compareValue ( jmsDestination ) ) { configurator . addErrorMessage ( nls . getFormattedMessage ( "CONFLICT_JMS_DESTINATION_REFERENCE_BETWEEN_WEB_FRAGMENT_XML" , new Object [ ] { name , existedCF . getLibraryURI ( ) , this . configurator . getLibraryURI ( ) } , "Two " + JNDIEnvironmentRefType . JMSConnectionFactory . getXMLElementName ( ) + " configurations with the same name {0} found in the web-fragment.xml of {1} and {2}." ) ) ; } } }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.