signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class Cursor { /** * Synchronized . Move the cursor down to the next entry
* in the list and return it .
* @ return The next entry in the list , or null if there is no next entry . */
public synchronized Entry next ( ) { } } | if ( tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "next" ) ; // can only do anything if the cursor is still pointing in to a list
checkEntryParent ( ) ; Entry nextEntry = null ; synchronized ( parentList ) { // get the next entry in the list
nextEntry = getNextEntry ( ) ; // if the next entry is null
if ( nextEntry == null ) { // then hopefully we ' re at the end of the list
if ( current == parentList . last ) { // so move the cursor to the bottom of the list ,
// not pointing to any actual entry
moveToBottom ( ) ; } else if ( ! atBottom ) { // it should not be possible for the next entry to be null but the current
// not be the last one in the list or already at the bottom
SIErrorException e = new SIErrorException ( nls . getFormattedMessage ( "INTERNAL_MESSAGING_ERROR_CWSIP0001" , new Object [ ] { "Cursor" , "1:160:1.15" } , null ) ) ; FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.utils.linkedlist.Cursor.next" , "1:166:1.15" , this ) ; SibTr . exception ( tc , e ) ; if ( tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "next" , e ) ; throw e ; } } else { // move the cursor to the next entry
moveCursor ( nextEntry ) ; } } if ( tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "next" , nextEntry ) ; return nextEntry ; |
public class ConfigurationUtils { /** * Returns an instance of { @ link AlluxioConfiguration } with the defaults and values from
* alluxio - site properties .
* @ return the set of Alluxio properties loaded from the site - properties file */
public static AlluxioProperties defaults ( ) { } } | if ( sDefaultProperties == null ) { synchronized ( DEFAULT_PROPERTIES_LOCK ) { // We don ' t want multiple threads to reload
// properties at the same time .
// Check if properties are still null so we don ' t reload a second time .
if ( sDefaultProperties == null ) { reloadProperties ( ) ; } } } return sDefaultProperties . copy ( ) ; |
public class ReflectionUtils { /** * < p > getAllFields . < / p >
* @ param type a { @ link java . lang . Class } object .
* @ return a { @ link java . util . List } object . */
public static List < Field > getAllFields ( Class < ? > type ) { } } | List < Field > fields = new ArrayList < > ( ) ; for ( Class < ? > currentType = type ; currentType != null ; currentType = currentType . getSuperclass ( ) ) { fields . addAll ( Arrays . asList ( currentType . getDeclaredFields ( ) ) ) ; } return fields ; |
public class AnimaQuery { /** * Build a delete statement .
* @ param model model instance
* @ param < S >
* @ return delete sql */
private < S extends Model > String buildDeleteSQL ( S model ) { } } | SQLParams sqlParams = SQLParams . builder ( ) . model ( model ) . modelClass ( this . modelClass ) . tableName ( this . tableName ) . pkName ( this . primaryKeyColumn ) . conditionSQL ( this . conditionSQL ) . build ( ) ; return Anima . of ( ) . dialect ( ) . delete ( sqlParams ) ; |
public class CmsXmlSitemapGenerator { /** * Gets the list of pages which should be directly added to the XML sitemap . < p >
* @ return the list of resources which should be directly added to the XML sitemap
* @ throws CmsException if something goes wrong */
protected List < CmsResource > getDirectPages ( ) throws CmsException { } } | List < CmsResource > result = new ArrayList < CmsResource > ( ) ; result . addAll ( getNavigationPages ( ) ) ; Set < String > includeRoots = m_includeExcludeSet . getIncludeRoots ( ) ; for ( String includeRoot : includeRoots ) { try { CmsResource resource = m_guestCms . readResource ( includeRoot ) ; if ( resource . isFile ( ) ) { result . add ( resource ) ; } else { List < CmsResource > subtreeFiles = m_guestCms . readResources ( includeRoot , CmsResourceFilter . DEFAULT_FILES , true ) ; result . addAll ( subtreeFiles ) ; } } catch ( CmsVfsResourceNotFoundException e ) { LOG . warn ( "Could not read include resource: " + includeRoot ) ; } } Iterator < CmsResource > filterIter = result . iterator ( ) ; while ( filterIter . hasNext ( ) ) { CmsResource currentResource = filterIter . next ( ) ; if ( currentResource . isInternal ( ) || m_includeExcludeSet . isExcluded ( currentResource . getRootPath ( ) ) ) { filterIter . remove ( ) ; } } return result ; |
public class EConv { /** * / * rb _ econv _ convert0 */
private EConvResult convertInternal ( byte [ ] in , Ptr inPtr , int inStop , byte [ ] out , Ptr outPtr , int outStop , int flags ) { } } | lastError . reset ( ) ; EConvResult res ; int len ; if ( numTranscoders == 0 ) { if ( inBuf . bytes != null && inBuf . dataStart != inBuf . dataEnd ) { if ( outStop - outPtr . p < inBuf . dataEnd - inBuf . dataStart ) { len = outStop - outPtr . p ; System . arraycopy ( inBuf . bytes , inBuf . dataStart , out , outPtr . p , len ) ; outPtr . p = outStop ; inBuf . dataStart += len ; return convertInternalResult ( EConvResult . DestinationBufferFull , null ) ; } len = inBuf . dataEnd - inBuf . dataStart ; System . arraycopy ( inBuf . bytes , inBuf . dataStart , out , outPtr . p , len ) ; outPtr . p += len ; inBuf . dataStart = inBuf . dataEnd = inBuf . bufStart ; if ( ( flags & AFTER_OUTPUT ) != 0 ) return convertInternalResult ( EConvResult . AfterOutput , null ) ; } if ( outStop - outPtr . p < inStop - inPtr . p ) { len = outStop - outPtr . p ; } else { len = inStop - inPtr . p ; } if ( len > 0 && ( flags & AFTER_OUTPUT ) != 0 ) { out [ outPtr . p ++ ] = in [ inPtr . p ++ ] ; return convertInternalResult ( EConvResult . AfterOutput , null ) ; } System . arraycopy ( in , inPtr . p , out , outPtr . p , len ) ; outPtr . p += len ; inPtr . p += len ; if ( inPtr . p != inStop ) { res = EConvResult . DestinationBufferFull ; } else if ( ( flags & PARTIAL_INPUT ) != 0 ) { res = EConvResult . SourceBufferEmpty ; } else { res = EConvResult . Finished ; } return convertInternalResult ( res , null ) ; } boolean hasOutput = false ; EConvElement elem = elements [ numTranscoders - 1 ] ; if ( elem . bytes != null ) { int dataStart = elem . dataStart ; int dataEnd = elem . dataEnd ; byte [ ] data = elem . bytes ; if ( dataStart != dataEnd ) { if ( outStop - outPtr . p < dataEnd - dataStart ) { len = outStop - outPtr . p ; System . arraycopy ( data , dataStart , out , outPtr . p , len ) ; outPtr . p = outStop ; elem . 
dataStart += len ; return convertInternalResult ( EConvResult . DestinationBufferFull , null ) ; } len = dataEnd - dataStart ; System . arraycopy ( data , dataStart , out , outPtr . p , len ) ; outPtr . p += len ; elem . dataStart = elem . dataEnd = elem . bufStart ; hasOutput = true ; } } Ptr resultPosition = new Ptr ( 0 ) ; if ( inBuf != null && inBuf . dataStart != inBuf . dataEnd ) { Ptr inDataStartPtr = new Ptr ( inBuf . dataStart ) ; res = transConv ( inBuf . bytes , inDataStartPtr , inBuf . dataEnd , out , outPtr , outStop , ( flags & ~ AFTER_OUTPUT ) | PARTIAL_INPUT , resultPosition ) ; inBuf . dataStart = inDataStartPtr . p ; if ( ! res . isSourceBufferEmpty ( ) ) return convertInternalResult ( EConvResult . SourceBufferEmpty , resultPosition ) ; } if ( hasOutput && ( flags & AFTER_OUTPUT ) != 0 && inPtr . p != inStop ) { inStop = inPtr . p ; res = transConv ( in , inPtr , inStop , out , outPtr , outStop , flags , resultPosition ) ; if ( res . isSourceBufferEmpty ( ) ) res = EConvResult . AfterOutput ; } else if ( ( flags & AFTER_OUTPUT ) != 0 || numTranscoders == 1 ) { res = transConv ( in , inPtr , inStop , out , outPtr , outStop , flags , resultPosition ) ; } else { flags |= AFTER_OUTPUT ; do { res = transConv ( in , inPtr , inStop , out , outPtr , outStop , flags , resultPosition ) ; } while ( res . isAfterOutput ( ) ) ; } return convertInternalResult ( res , resultPosition ) ; |
public class CommandMarshaller { /** * Marshall the given parameter object . */
public void marshall ( Command command , ProtocolMarshaller protocolMarshaller ) { } } | if ( command == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( command . getCommandId ( ) , COMMANDID_BINDING ) ; protocolMarshaller . marshall ( command . getDocumentName ( ) , DOCUMENTNAME_BINDING ) ; protocolMarshaller . marshall ( command . getDocumentVersion ( ) , DOCUMENTVERSION_BINDING ) ; protocolMarshaller . marshall ( command . getComment ( ) , COMMENT_BINDING ) ; protocolMarshaller . marshall ( command . getExpiresAfter ( ) , EXPIRESAFTER_BINDING ) ; protocolMarshaller . marshall ( command . getParameters ( ) , PARAMETERS_BINDING ) ; protocolMarshaller . marshall ( command . getInstanceIds ( ) , INSTANCEIDS_BINDING ) ; protocolMarshaller . marshall ( command . getTargets ( ) , TARGETS_BINDING ) ; protocolMarshaller . marshall ( command . getRequestedDateTime ( ) , REQUESTEDDATETIME_BINDING ) ; protocolMarshaller . marshall ( command . getStatus ( ) , STATUS_BINDING ) ; protocolMarshaller . marshall ( command . getStatusDetails ( ) , STATUSDETAILS_BINDING ) ; protocolMarshaller . marshall ( command . getOutputS3Region ( ) , OUTPUTS3REGION_BINDING ) ; protocolMarshaller . marshall ( command . getOutputS3BucketName ( ) , OUTPUTS3BUCKETNAME_BINDING ) ; protocolMarshaller . marshall ( command . getOutputS3KeyPrefix ( ) , OUTPUTS3KEYPREFIX_BINDING ) ; protocolMarshaller . marshall ( command . getMaxConcurrency ( ) , MAXCONCURRENCY_BINDING ) ; protocolMarshaller . marshall ( command . getMaxErrors ( ) , MAXERRORS_BINDING ) ; protocolMarshaller . marshall ( command . getTargetCount ( ) , TARGETCOUNT_BINDING ) ; protocolMarshaller . marshall ( command . getCompletedCount ( ) , COMPLETEDCOUNT_BINDING ) ; protocolMarshaller . marshall ( command . getErrorCount ( ) , ERRORCOUNT_BINDING ) ; protocolMarshaller . marshall ( command . 
getDeliveryTimedOutCount ( ) , DELIVERYTIMEDOUTCOUNT_BINDING ) ; protocolMarshaller . marshall ( command . getServiceRole ( ) , SERVICEROLE_BINDING ) ; protocolMarshaller . marshall ( command . getNotificationConfig ( ) , NOTIFICATIONCONFIG_BINDING ) ; protocolMarshaller . marshall ( command . getCloudWatchOutputConfig ( ) , CLOUDWATCHOUTPUTCONFIG_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class BusHolder { /** * Update the Bus held by the this instance using the provided parameters .
* This basically prepares the bus for being used with JBossWS .
* @ param resolver The ResourceResolver to configure , if any
* @ param configurer The JBossWSCXFConfigurer to install in the bus , if any
* @ param wsmd The current JBossWebservicesMetaData , if any
* @ param dep The current deployment */
public void configure ( ResourceResolver resolver , Configurer configurer , JBossWebservicesMetaData wsmd , Deployment dep ) { } } | if ( configured ) { throw Messages . MESSAGES . busAlreadyConfigured ( bus ) ; } bus . setProperty ( org . jboss . wsf . stack . cxf . client . Constants . DEPLOYMENT_BUS , true ) ; busHolderListener = new BusHolderLifeCycleListener ( ) ; bus . getExtension ( BusLifeCycleManager . class ) . registerLifeCycleListener ( busHolderListener ) ; setWSDLManagerStreamWrapper ( bus ) ; if ( configurer != null ) { bus . setExtension ( configurer , Configurer . class ) ; } Map < String , String > props = getProperties ( wsmd ) ; setInterceptors ( bus , dep , props ) ; dep . addAttachment ( Bus . class , bus ) ; try { final JASPIAuthenticationProvider jaspiProvider = SPIProvider . getInstance ( ) . getSPI ( JASPIAuthenticationProvider . class , ClassLoaderProvider . getDefaultProvider ( ) . getServerIntegrationClassLoader ( ) ) ; if ( jaspiProvider != null && jaspiProvider . enableServerAuthentication ( dep , wsmd ) ) { bus . getInInterceptors ( ) . add ( new AuthenticationMgrSubjectCreatingInterceptor ( ) ) ; } } catch ( WSFException e ) { Loggers . DEPLOYMENT_LOGGER . cannotFindJaspiClasses ( ) ; } setResourceResolver ( bus , resolver ) ; if ( bus . getExtension ( PolicyEngine . class ) != null ) { bus . getExtension ( PolicyEngine . class ) . setAlternativeSelector ( getAlternativeSelector ( props ) ) ; } setCXFManagement ( bus , props ) ; // * first * enabled cxf management if required , * then * add anything else which could be manageable ( e . g . work queues )
setAdditionalWorkQueues ( bus , props ) ; setWSDiscovery ( bus , props ) ; AnnotationsInfo ai = dep . getAttachment ( AnnotationsInfo . class ) ; if ( ai == null || ai . hasAnnotatedClasses ( PolicySets . class . getName ( ) ) ) { policySetsListener = new PolicySetsAnnotationListener ( dep . getClassLoader ( ) ) ; bus . getExtension ( FactoryBeanListenerManager . class ) . addListener ( policySetsListener ) ; } // default to USE _ ORIGINAL _ THREAD = true ; this can be overridden by simply setting the property in the endpoint or in the message using an interceptor
// this forces one way operation to use original thread , which is required for ejb webserivce endpoints to avoid authorization failures from ejb container
// and is a performance improvement in general when running in - container , as CXF needs to cache the message to free the thread , which is expensive
// ( moreover the user can tune the web container thread pool instead of expecting cxf to fork new threads )
bus . setProperty ( OneWayProcessorInterceptor . USE_ORIGINAL_THREAD , true ) ; // [ JBWS - 3135 ] enable decoupled faultTo . This is an optional feature in cxf and we need this to be default to make it same behavior with native stack
bus . setProperty ( "org.apache.cxf.ws.addressing.decoupled_fault_support" , true ) ; FeatureUtils . addFeatures ( bus , bus , props ) ; for ( DDEndpoint dde : metadata . getEndpoints ( ) ) { EndpointImpl endpoint = new EndpointImpl ( bus , newInstance ( dde . getImplementor ( ) , dep ) ) ; if ( dde . getInvoker ( ) != null ) endpoint . setInvoker ( newInvokerInstance ( dde . getInvoker ( ) , dep ) ) ; endpoint . setAddress ( dde . getAddress ( ) ) ; endpoint . setEndpointName ( dde . getPortName ( ) ) ; endpoint . setServiceName ( dde . getServiceName ( ) ) ; endpoint . setWsdlLocation ( dde . getWsdlLocation ( ) ) ; setHandlers ( endpoint , dde , dep ) ; if ( dde . getProperties ( ) != null ) { Map < String , Object > p = new HashMap < String , Object > ( ) ; p . putAll ( dde . getProperties ( ) ) ; endpoint . setProperties ( p ) ; } if ( dde . isAddressingEnabled ( ) ) { WSAddressingFeature addressingFeature = new WSAddressingFeature ( ) ; addressingFeature . setAddressingRequired ( dde . isAddressingRequired ( ) ) ; addressingFeature . setResponses ( dde . getAddressingResponses ( ) ) ; endpoint . getFeatures ( ) . add ( addressingFeature ) ; } endpoint . setPublishedEndpointUrl ( dde . getPublishedEndpointUrl ( ) ) ; endpoint . setSOAPAddressRewriteMetadata ( dep . getAttachment ( SOAPAddressRewriteMetadata . class ) ) ; endpoint . publish ( ) ; endpoints . add ( endpoint ) ; if ( dde . isMtomEnabled ( ) ) { SOAPBinding binding = ( SOAPBinding ) endpoint . getBinding ( ) ; binding . setMTOMEnabled ( true ) ; } } configured = true ; |
public class Instant { /** * Returns a copy of this instant with the specified duration added .
* This instance is immutable and unaffected by this method call .
* @ param secondsToAdd the seconds to add , positive or negative
* @ param nanosToAdd the nanos to add , positive or negative
* @ return an { @ code Instant } based on this instant with the specified seconds added , not null
* @ throws DateTimeException if the result exceeds the maximum or minimum instant
* @ throws ArithmeticException if numeric overflow occurs */
private Instant plus ( long secondsToAdd , long nanosToAdd ) { } } | if ( ( secondsToAdd | nanosToAdd ) == 0 ) { return this ; } long epochSec = Jdk8Methods . safeAdd ( seconds , secondsToAdd ) ; epochSec = Jdk8Methods . safeAdd ( epochSec , nanosToAdd / NANOS_PER_SECOND ) ; nanosToAdd = nanosToAdd % NANOS_PER_SECOND ; long nanoAdjustment = nanos + nanosToAdd ; // safe int + NANOS _ PER _ SECOND
return ofEpochSecond ( epochSec , nanoAdjustment ) ; |
public class Currency { /** * Returns a Currency instance for the given currency code . */
public static Currency getInstance ( String currencyCode ) { } } | Currency currency = getInstanceNoCreate ( currencyCode . toUpperCase ( ) ) ; if ( currency == null ) { return createCurrency ( currencyCode . toUpperCase ( ) , null , null ) ; } else { return currency ; } |
public class ManagementClient { /** * Deletes the subscription described by the topicPath and the subscriptionName .
* @ param topicPath - The name of the topic .
* @ param subscriptionName - The name of the subscription .
* @ throws IllegalArgumentException - path is not null / empty / too long / invalid .
* @ throws TimeoutException - The operation times out . The timeout period is initiated through ClientSettings . operationTimeout
* @ throws AuthorizationFailedException - No sufficient permission to perform this operation . Please check ClientSettings . tokenProvider has correct details .
* @ throws ServerBusyException - The server is busy . You should wait before you retry the operation .
* @ throws ServiceBusException - An internal error or an unexpected exception occurred .
* @ throws MessagingEntityNotFoundException - An entity with this name does not exist .
* @ throws InterruptedException if the current thread was interrupted */
public Void deleteSubscription ( String topicPath , String subscriptionName ) throws ServiceBusException , InterruptedException { } } | return Utils . completeFuture ( this . asyncClient . deleteSubscriptionAsync ( topicPath , subscriptionName ) ) ; |
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getIfcTorqueMeasure ( ) { } } | if ( ifcTorqueMeasureEClass == null ) { ifcTorqueMeasureEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 757 ) ; } return ifcTorqueMeasureEClass ; |
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public IfcDistributionChamberElementTypeEnum createIfcDistributionChamberElementTypeEnumFromString ( EDataType eDataType , String initialValue ) { } } | IfcDistributionChamberElementTypeEnum result = IfcDistributionChamberElementTypeEnum . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ; |
public class Headers { /** * / * - - - - - [ Remove ] - - - - - */
public Header remove ( AsciiString name ) { } } | if ( name == null || first == null ) return null ; Header h = null ; int idx = name . hashCode ( ) & ( table . length - 1 ) ; Object obj = table [ idx ] ; if ( obj == null ) return null ; else if ( obj instanceof Header ) { Header header = ( Header ) obj ; if ( header . name . equals ( name ) ) { table [ idx ] = null ; h = header ; } } else { Header headers [ ] = ( Header [ ] ) obj ; for ( int i = 0 ; i < headers . length ; i ++ ) { Header header = headers [ i ] ; if ( header != null && header . name . equals ( name ) ) { headers [ i ] = null ; h = header ; break ; } } } if ( h != null ) removeSameNext ( h ) ; return h ; |
public class ProviGenProvider { /** * Creates a table in the database for the specified { @ link Contract } . < br / >
* This may be used if you ' re < b > not < / b > calling { @ code super . onCreateDatabase ( database ) } .
* @ param database The database .
* @ param contractClass A { @ link Contract } class to create the table with . */
@ SuppressWarnings ( "rawtypes" ) public void createTable ( SQLiteDatabase database , Class contractClass ) { } } | try { openHelper . createTable ( database , new ContractHolder ( contractClass ) ) ; } catch ( InvalidContractException exception ) { exception . printStackTrace ( ) ; } |
public class ResponseImpl { /** * This method gets the Content - Length of the response body .
* @ return The content length of the response . */
public long getContentLength ( ) { } } | try { return getInternalResponse ( ) . body ( ) . contentLength ( ) ; } catch ( NullPointerException e ) { logger . error ( "Failed to get the response content length from " + getRequestURL ( ) + ". Error: " + e . getMessage ( ) ) ; return 0 ; } |
public class LpSolve { /** * Register an < code > LogListener < / code > for callback .
* @ param listener the listener that should be called by lp _ solve
* @ param userhandle an arbitrary object that is passed to the listener on call */
public void putLogfunc ( LogListener listener , Object userhandle ) throws LpSolveException { } } | logListener = listener ; logUserhandle = ( listener != null ) ? userhandle : null ; addLp ( this ) ; registerLogfunc ( ) ; |
public class SessionBuilder { /** * Adds a contact point to use for the initial connection to the cluster .
* < p > You only need this method if you use a custom { @ link EndPoint } implementation . Otherwise ,
* use { @ link # addContactPoint ( InetSocketAddress ) } . */
@ NonNull public SelfT addContactEndPoint ( @ NonNull EndPoint contactPoint ) { } } | this . programmaticContactPoints . add ( contactPoint ) ; return self ; |
public class AbstractMinMaxTextBox { /** * set distance value should be increased / decreased when using up / down buttons .
* @ param pstep step distance */
public void setStep ( final Integer pstep ) { } } | if ( pstep == null ) { getInputElement ( ) . removeAttribute ( "step" ) ; } else { getInputElement ( ) . setStep ( pstep . toString ( ) ) ; } |
public class Node { /** * Convenience method for writeXml ( node , true ) ; */
public T writeXml ( org . w3c . dom . Node node ) throws IOException { } } | return ( T ) writeXml ( node , true ) ; |
public class CudaZeroHandler { /** * Asynchronous version of memcpy
* PLEASE NOTE : This is device - dependent method , if it ' s not supported in your environment , blocking call will be used instead .
* @ param dstBuffer
* @ param srcPointer
* @ param length
* @ param dstOffset */
@ Override public void memcpyAsync ( DataBuffer dstBuffer , Pointer srcPointer , long length , long dstOffset ) { } } | AllocationPoint point = ( ( BaseCudaDataBuffer ) dstBuffer ) . getAllocationPoint ( ) ; // we update host memory regardless .
// Pointer dP = new Pointer ( ( point . getAllocationStatus ( ) = = AllocationStatus . DEVICE ? point . getPointers ( ) . getDevicePointer ( ) . address ( ) : point . getPointers ( ) . getHostPointer ( ) . address ( ) ) + dstOffset ) ;
Pointer dP = new CudaPointer ( ( point . getPointers ( ) . getHostPointer ( ) . address ( ) ) + dstOffset ) ; // Pointer sP = new Pointer ( srcPointer . getNativePointer ( ) ) ;
// log . info ( " Location : " + point . getAllocationStatus ( ) ) ;
// if ( length > 4)
// log . info ( " memcpyAsync : [ " + srcPointer . getNativePointer ( ) + " ] - > [ " + dP . getNativePointer ( ) + " ] , length : [ " + length + " ] , offset : [ " + dstOffset + " ] , dstBufferOffset : [ " + ( dstBuffer . getElementSize ( ) * dstBuffer . offset ( ) ) + " / " + dstBuffer . offset ( ) + " ] " ) ;
CudaContext tContext = null ; if ( dstBuffer . isConstant ( ) ) { org . bytedeco . javacpp . Pointer dstPointer = new CudaPointer ( point . getPointers ( ) . getHostPointer ( ) . address ( ) + dstOffset , 0L ) ; org . bytedeco . javacpp . Pointer srcPointerJ = new CudaPointer ( srcPointer , length ) ; // log . info ( " JCPP Memcpy : [ { } ] - > [ { } ] , length : [ { } ] " , srcPointerJ . address ( ) , dstPointer . address ( ) , length ) ;
val profD = PerformanceTracker . getInstance ( ) . helperStartTransaction ( ) ; org . bytedeco . javacpp . Pointer . memcpy ( dstPointer , srcPointerJ , length ) ; PerformanceTracker . getInstance ( ) . helperRegisterTransaction ( point . getDeviceId ( ) , profD , point . getNumberOfBytes ( ) , MemcpyDirection . HOST_TO_HOST ) ; point . tickHostRead ( ) ; } else { // log . info ( " Memcpy pointers : [ { } ] - > [ { } ] " , srcPointer . address ( ) , dP . address ( ) ) ;
CudaContext context = flowController . prepareAction ( point ) ; tContext = context ; val prof = PerformanceTracker . getInstance ( ) . helperStartTransaction ( ) ; if ( nativeOps . memcpyAsync ( dP , srcPointer , length , CudaConstants . cudaMemcpyHostToHost , context . getSpecialStream ( ) ) == 0 ) throw new IllegalStateException ( "MemcpyAsync H2H failed: [" + srcPointer . address ( ) + "] -> [" + dP . address ( ) + "]" ) ; flowController . commitTransfer ( tContext . getSpecialStream ( ) ) ; PerformanceTracker . getInstance ( ) . helperRegisterTransaction ( point . getDeviceId ( ) , prof , point . getNumberOfBytes ( ) , MemcpyDirection . HOST_TO_HOST ) ; if ( point . getAllocationStatus ( ) == AllocationStatus . HOST ) flowController . registerAction ( context , point ) ; } // if we ' re copying something into host memory , but we ' re on device - we need to provide exact copy to device as well
if ( point . getAllocationStatus ( ) == AllocationStatus . DEVICE ) { // TODO : this sounds wrong , and probably memcpy whould check initial direction , like relocate did before
Pointer rDP = new CudaPointer ( point . getPointers ( ) . getDevicePointer ( ) . address ( ) + dstOffset ) ; if ( tContext == null ) tContext = flowController . prepareAction ( point ) ; // log . info ( " MemcpyAsync to device . . . [ { } ] - > [ { } ] " , dP . getNativePointer ( ) , rDP . getNativePointer ( ) ) ;
val prof = PerformanceTracker . getInstance ( ) . helperStartTransaction ( ) ; if ( nativeOps . memcpyAsync ( rDP , dP , length , CudaConstants . cudaMemcpyHostToDevice , tContext . getSpecialStream ( ) ) == 0 ) throw new IllegalStateException ( "MemcpyAsync H2D failed: [" + dP . address ( ) + "] -> [" + rDP . address ( ) + "]" ) ; flowController . commitTransfer ( tContext . getSpecialStream ( ) ) ; PerformanceTracker . getInstance ( ) . helperRegisterTransaction ( point . getDeviceId ( ) , prof , point . getNumberOfBytes ( ) , MemcpyDirection . HOST_TO_DEVICE ) ; flowController . registerAction ( tContext , point ) ; } point . tickDeviceWrite ( ) ; |
public class AbstractCurrencyConversion { /** * Optionally rounds the factor to be used . By default this method will only round
* as much as it is needed , so the factor can be handled by the target amount instance based on its
* numeric capabilities . Rounding is applied only if { @ code amount . getContext ( ) . getMaxScale ( ) > 0 } as follows :
* < ul >
* < li > If the amount provides a { @ link MathContext } as context property this is used . < / li >
* < li > If the amount provides a { @ link RoundingMode } , this is used ( default is
* { @ code RoundingMode . HALF _ EVEN } ) . < / li >
* < li > By default the scale used is scale of the conversion factor . If the acmount allows a higher
* scale based on { @ code amount . getContext ( ) . getMaxScale ( ) } , this higher scale is used . < / li >
* < / ul >
* @ param amount the amount , not null .
* @ param factor the factor
* @ return the new rounding factor , never null . */
protected NumberValue roundFactor ( MonetaryAmount amount , NumberValue factor ) { } } | if ( amount . getContext ( ) . getMaxScale ( ) > 0 ) { MathContext mathContext = amount . getContext ( ) . get ( MathContext . class ) ; if ( mathContext == null ) { int scale = factor . getScale ( ) ; if ( factor . getScale ( ) > amount . getContext ( ) . getMaxScale ( ) ) { scale = amount . getContext ( ) . getMaxScale ( ) ; } RoundingMode roundingMode = amount . getContext ( ) . get ( RoundingMode . class ) ; if ( roundingMode == null ) { roundingMode = RoundingMode . HALF_EVEN ; } mathContext = new MathContext ( scale , roundingMode ) ; } return factor . round ( mathContext ) ; } return factor ; |
public class FileUtils { /** * Asserts that the given file exists .
* @ param path the { @ link File } to assert for existence .
* @ return a reference back to the file .
* @ throws java . io . FileNotFoundException if the file does not exist .
* @ see # isExisting ( File ) */
@ NullSafe public static File assertExists ( File path ) throws FileNotFoundException { } } | if ( isExisting ( path ) ) { return path ; } throw new FileNotFoundException ( String . format ( "[%1$s] was not found" , path ) ) ; |
public class AWSCodeCommitClient { /** * Creates a commit for a repository on the tip of a specified branch .
* @ param createCommitRequest
* @ return Result of the CreateCommit operation returned by the service .
* @ throws RepositoryNameRequiredException
* A repository name is required but was not specified .
* @ throws InvalidRepositoryNameException
* At least one specified repository name is not valid . < / p > < note >
* This exception only occurs when a specified repository name is not valid . Other exceptions occur when a
* required repository parameter is missing , or when a specified repository does not exist .
* @ throws RepositoryDoesNotExistException
* The specified repository does not exist .
* @ throws ParentCommitIdRequiredException
* A parent commit ID is required . To view the full commit ID of a branch in a repository , use
* < a > GetBranch < / a > or a Git command ( for example , git pull or git log ) .
* @ throws InvalidParentCommitIdException
* The parent commit ID is not valid . The commit ID cannot be empty , and must match the head commit ID for
* the branch of the repository where you want to add or update a file .
* @ throws ParentCommitDoesNotExistException
* The parent commit ID is not valid because it does not exist . The specified parent commit ID does not
* exist in the specified branch of the repository .
* @ throws ParentCommitIdOutdatedException
* The file could not be added because the provided parent commit ID is not the current tip of the specified
* branch . To view the full commit ID of the current head of the branch , use < a > GetBranch < / a > .
* @ throws BranchNameRequiredException
* A branch name is required but was not specified .
* @ throws InvalidBranchNameException
* The specified reference name is not valid .
* @ throws BranchDoesNotExistException
* The specified branch does not exist .
* @ throws BranchNameIsTagNameException
* The specified branch name is not valid because it is a tag name . Type the name of a current branch in the
* repository . For a list of valid branch names , use < a > ListBranches < / a > .
* @ throws FileEntryRequiredException
* The commit cannot be created because no files have been specified as added , updated , or changed ( PutFile
* or DeleteFile ) for the commit .
* @ throws MaximumFileEntriesExceededException
* The number of specified files to change as part of this commit exceeds the maximum number of files that
* can be changed in a single commit . Consider using a Git client for these changes .
* @ throws PutFileEntryConflictException
* The commit cannot be created because one or more files specified in the commit reference both a file and
* a folder .
* @ throws SourceFileOrContentRequiredException
* The commit cannot be created because no source files or file content have been specified for the commit .
* @ throws FileContentAndSourceFileSpecifiedException
* The commit cannot be created because both a source file and file content have been specified for the same
* file . You cannot provide both . Either specify a source file , or provide the file content directly .
* @ throws PathRequiredException
* The folderPath for a location cannot be null .
* @ throws InvalidPathException
* The specified path is not valid .
* @ throws SamePathRequestException
* The commit cannot be created because one or more changes in this commit duplicate actions in the same
* file path . For example , you cannot make the same delete request to the same file in the same file path
* twice , or make a delete request and a move request to the same file as part of the same commit .
* @ throws FileDoesNotExistException
* The specified file does not exist . Verify that you have provided the correct name of the file , including
* its full path and extension .
* @ throws FileContentSizeLimitExceededException
* The file cannot be added because it is too large . The maximum file size that can be added using PutFile
* is 6 MB , and the combined file content change size is 7 MB . Consider making these changes using a Git
* client .
* @ throws FolderContentSizeLimitExceededException
* The commit cannot be created because at least one of the overall changes in the commit result in a folder
* contents exceeding the limit of 6 MB . Either reduce the number and size of your changes , or split the
* changes across multiple folders .
* @ throws InvalidDeletionParameterException
* The specified deletion parameter is not valid .
* @ throws RestrictedSourceFileException
* The commit cannot be created because one of the changes specifies copying or moving a . gitkeep file .
* @ throws FileModeRequiredException
* The commit cannot be created because a file mode is required to update mode permissions for an existing
* file , but no file mode has been specified .
* @ throws InvalidFileModeException
* The specified file mode permission is not valid . For a list of valid file mode permissions , see
* < a > PutFile < / a > .
* @ throws NameLengthExceededException
* The user name is not valid because it has exceeded the character limit for file names . File names ,
* including the path to the file , cannot exceed the character limit .
* @ throws InvalidEmailException
* The specified email address either contains one or more characters that are not allowed , or it exceeds
* the maximum number of characters allowed for an email address .
* @ throws CommitMessageLengthExceededException
* The commit message is too long . Provide a shorter string .
* @ throws EncryptionIntegrityChecksFailedException
* An encryption integrity check failed .
* @ throws EncryptionKeyAccessDeniedException
* An encryption key could not be accessed .
* @ throws EncryptionKeyDisabledException
* The encryption key is disabled .
* @ throws EncryptionKeyNotFoundException
* No encryption key was found .
* @ throws EncryptionKeyUnavailableException
* The encryption key is not available .
* @ throws NoChangeException
* The commit cannot be created because no changes will be made to the repository as a result of this
* commit . A commit must contain at least one change .
* @ throws FileNameConflictsWithDirectoryNameException
* A file cannot be added to the repository because the specified file name has the same name as a directory
* in this repository . Either provide another name for the file , or add the file in a directory that does
* not match the file name .
* @ throws DirectoryNameConflictsWithFileNameException
* A file cannot be added to the repository because the specified path name has the same name as a file that
* already exists in this repository . Either provide a different name for the file , or specify a different
* path for the file .
* @ throws FilePathConflictsWithSubmodulePathException
* The commit cannot be created because a specified file path points to a submodule . Verify that the
* destination files have valid file paths that do not point to a submodule .
* @ sample AWSCodeCommit . CreateCommit
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / codecommit - 2015-04-13 / CreateCommit " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public CreateCommitResult createCommit ( CreateCommitRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeCreateCommit ( request ) ; |
public class ImmutableBiMap { /** * Returns an immutable bimap containing the given entries .
* @ throws IllegalArgumentException if two keys have the same value or two
* values have the same key
* @ throws NullPointerException if any key , value , or entry is null
* @ since 19.0 */
@ Beta public static < K , V > ImmutableBiMap < K , V > copyOf ( Iterable < ? extends Entry < ? extends K , ? extends V > > entries ) { } } | Entry < ? , ? > [ ] entryArray = Iterables . toArray ( entries , EMPTY_ENTRY_ARRAY ) ; switch ( entryArray . length ) { case 0 : return of ( ) ; case 1 : @ SuppressWarnings ( "unchecked" ) // safe covariant cast in this context
Entry < K , V > entry = ( Entry < K , V > ) entryArray [ 0 ] ; return of ( entry . getKey ( ) , entry . getValue ( ) ) ; default : return new RegularImmutableBiMap < K , V > ( entryArray ) ; } |
public class JesqueUtils { /** * Join the given strings , separated by the given separator .
* @ param sep
* the separator
* @ param strs
* the strings to join
* @ return the joined string */
public static String join ( final String sep , final Iterable < String > strs ) { } } | final StringBuilder buf = new StringBuilder ( ) ; String prefix = "" ; for ( final String str : strs ) { buf . append ( prefix ) . append ( str ) ; prefix = sep ; } return buf . toString ( ) ; |
public class TransliteratorRegistry { /** * Register an ID and a Transliterator object . This adds an entry
* to the dynamic store , or replaces an existing entry . Any entry
* in the underlying static locale resource store is masked . */
public void put ( String ID , Transliterator trans , boolean visible ) { } } | registerEntry ( ID , trans , visible ) ; |
public class Ifc4FactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public IfcMechanicalFastenerTypeEnum createIfcMechanicalFastenerTypeEnumFromString ( EDataType eDataType , String initialValue ) { } } | IfcMechanicalFastenerTypeEnum result = IfcMechanicalFastenerTypeEnum . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ; |
public class MongodbQueryBuilderFactory { /** * 获取builder
* @ return */
public MongodbBuilder builder ( ) { } } | List < IRuleParser > mongodbRuleParsers = new ArrayList < > ( ) ; for ( IRuleParser parser : ruleParsers ) { if ( parser instanceof AbstractMongodbRuleParser ) { mongodbRuleParsers . add ( parser ) ; } } return new MongodbBuilder ( groupParser , mongodbRuleParsers , filters ) ; |
public class StringMan { /** * Takes an array of Strings with integer values and returns an array of ints , or throws a
* { @ link NumberFormatException } if there ' s a problem . */
public static int [ ] parseInts ( String [ ] array ) { } } | int [ ] result = new int [ array . length ] ; for ( int i = 0 ; i < array . length ; i ++ ) result [ i ] = Integer . parseInt ( array [ i ] ) ; return result ; |
public class Roaring64NavigableMap { /** * Returns the number of distinct integers added to the bitmap ( e . g . , number of bits set ) .
* @ return the cardinality */
@ Override public long getLongCardinality ( ) { } } | if ( doCacheCardinalities ) { if ( highToBitmap . isEmpty ( ) ) { return 0L ; } int indexOk = ensureCumulatives ( highestHigh ( ) ) ; // ensureCumulatives may have removed empty bitmaps
if ( highToBitmap . isEmpty ( ) ) { return 0L ; } return sortedCumulatedCardinality [ indexOk - 1 ] ; } else { long cardinality = 0L ; for ( BitmapDataProvider bitmap : highToBitmap . values ( ) ) { cardinality += bitmap . getLongCardinality ( ) ; } return cardinality ; } |
public class DocVectorModel { /** * 查询最相似的前10个文档
* @ param query 查询语句 ( 或者说一个文档的内容 )
* @ return */
public List < Map . Entry < Integer , Float > > nearest ( String query ) { } } | return queryNearest ( query , 10 ) ; |
public class HtmlTable { /** * Conveniently allows column implementations to lookup cells inside a column without
* duplicating the effort to come up with xpath for each column ' s cells . Simply use this method
* with that column ' s index .
* @ return A locator that finds a cell based on a row and column index . It does this by
* constructing an xpath . If a { @ code < tbody > } tag is present , this will use ,
* " . / tbody / tr [ rowIndex ] / td [ colIndex ] " . If no { @ code < tbody > } tag is present ,
* " . / tr [ rowIndex ] / td [ colIndex ] " will be used .
* < p > If your modelling an unconventionally structured html table , you ' re encouraged to override
* this method .
* @ param rowIndex Starting from the top , at 1.
* @ param colIndex Starting from the left , at 1. */
protected Locator byRowColumn ( int rowIndex , int colIndex ) { } } | if ( rowIndex < 1 ) { throw new IllegalArgumentException ( "Row index must be greater than 0." ) ; } if ( colIndex < 1 ) { throw new IllegalArgumentException ( "Column index must be greater than 0." ) ; } String xpath = bodyTag . isPresent ( ) ? "./tbody/tr[" + rowIndex + "]/td[" + colIndex + "]" : "./tr[" + rowIndex + "]/td[" + colIndex + "]" ; return byInner ( By . xpath ( xpath ) ) ; |
public class Util { /** * Returns a thread safe counter map
* @ return */
public static < K , V > CounterMap < K , V > parallelCounterMap ( ) { } } | CounterMap < K , V > totalWords = new CounterMap < > ( ) ; return totalWords ; |
public class LongHashMap { /** * Initializes object to be an empty map with the specified initial
* capacity , which is assumed to be a power of two between
* MINIMUM _ CAPACITY and MAXIMUM _ CAPACITY inclusive . */
private void init ( int initCapacity ) { } } | assert ( initCapacity & - initCapacity ) == initCapacity ; // power of 2
assert initCapacity >= MINIMUM_CAPACITY ; assert initCapacity <= MAXIMUM_CAPACITY ; threshold = ( initCapacity * 3 ) / 4 ; table = new long [ 2 * initCapacity ] ; |
public class ObjectReferenceMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ObjectReference objectReference , ProtocolMarshaller protocolMarshaller ) { } } | if ( objectReference == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( objectReference . getSelector ( ) , SELECTOR_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class DescribeLoadBalancersRequest { /** * The Amazon Resource Names ( ARN ) of the load balancers . You can specify up to 20 load balancers in a single call .
* @ param loadBalancerArns
* The Amazon Resource Names ( ARN ) of the load balancers . You can specify up to 20 load balancers in a single
* call . */
public void setLoadBalancerArns ( java . util . Collection < String > loadBalancerArns ) { } } | if ( loadBalancerArns == null ) { this . loadBalancerArns = null ; return ; } this . loadBalancerArns = new java . util . ArrayList < String > ( loadBalancerArns ) ; |
public class SortHelper { /** * Creates a comparator for the { @ link InputColumn } based on its type
* ( sorting numerically or lexicographically ) .
* @ param column
* the column to sort on
* @ param columnIndex
* the index of the column in the input { @ link Row }
* @ return the comparator instance */
public static Comparator < Row > createComparator ( final InputColumn < ? > column , final int columnIndex ) { } } | final Class < ? > dataType = column . getDataType ( ) ; final boolean isNumber = dataType != null && ReflectionUtils . isNumber ( dataType ) ; final boolean isDate = dataType != null && ReflectionUtils . isDate ( dataType ) ; return ( row1 , row2 ) -> { final Comparable < ? > value1 = getComparableValue ( row1 , columnIndex , isNumber , isDate ) ; final Comparable < ? > value2 = getComparableValue ( row2 , columnIndex , isNumber , isDate ) ; final int comparableResult = CompareUtils . compareUnbound ( value1 , value2 ) ; if ( comparableResult != 0 ) { return comparableResult ; } else { // The values of the data at the row , and column to be
// sorted on are
// exactly the same . Now look at other values of all the
// columns to
// find if the two rows are same .
final int numberOfSelectItems = row1 . getSelectItems ( ) . size ( ) ; for ( int i = 0 ; i < numberOfSelectItems ; i ++ ) { final String rowValue1 = ( String ) row1 . getValue ( i ) ; final String rowValue2 = ( String ) row2 . getValue ( i ) ; final int compareResult = rowValue1 . compareToIgnoreCase ( rowValue2 ) ; if ( compareResult == 0 ) { continue ; } else { return compareResult ; } } } return comparableResult ; } ; |
public class RequestCreator { /** * Asynchronously fulfills the request into the specified { @ link RemoteViews } object with the
* given { @ code viewId } . This is used for loading bitmaps into all instances of a widget . */
public void into ( @ NonNull RemoteViews remoteViews , @ IdRes int viewId , int appWidgetId ) { } } | into ( remoteViews , viewId , new int [ ] { appWidgetId } , null ) ; |
public class InternalPage { /** * Returns unsafe binary search index .
* @ return index ( non - negative or - 1 which means that nothing was found ) */
protected static int binarySearchGuessUnsafe ( @ NotNull final BasePage page , @ NotNull final ByteIterable key ) { } } | int index = page . binarySearch ( key ) ; if ( index < 0 ) { index = - index - 2 ; } return index ; |
public class UIInput { /** * A boolean value that indicates whether an input value is required .
* If this value is true and no input value is provided by a postback operation , then the " requiredMessage " text is
* registered as a FacesMessage for the request , and validation fails .
* Default value : false . */
@ JSFProperty ( defaultValue = "false" ) public boolean isRequired ( ) { } } | return ( Boolean ) getStateHelper ( ) . eval ( PropertyKeys . required , Boolean . FALSE ) ; |
public class SecurityContextProviderImpl { /** * { @ inheritDoc } */
@ Override public ThreadContext createDefaultThreadContext ( Map < String , String > execProps ) { } } | return new SecurityContextImpl ( false , null ) ; |
public class ApiOvhMe { /** * Get this object properties
* REST : GET / me / api / application / { applicationId }
* @ param applicationId [ required ] */
public OvhApplication api_application_applicationId_GET ( Long applicationId ) throws IOException { } } | String qPath = "/me/api/application/{applicationId}" ; StringBuilder sb = path ( qPath , applicationId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhApplication . class ) ; |
public class DashboardResources { /** * Return all dashboard objects filtered by owner .
* @ param dashboardName The dashboard name filter .
* @ param owner The principlaUser owner for owner name filter .
* @ parampopulateMetaFieldsOnlyThe flag to determine if only meta fields should be populated .
* @ param version The version of the dashboard to return . It is either null or not empty
* @ return The list of filtered alerts in alert object . */
private List < Dashboard > _getDashboardsByOwner ( String dashboardName , PrincipalUser owner , boolean populateMetaFieldsOnly , String version ) { } } | List < Dashboard > result = new ArrayList < > ( ) ; if ( dashboardName != null && ! dashboardName . isEmpty ( ) ) { Dashboard dashboard = dService . findDashboardByNameAndOwner ( dashboardName , owner ) ; if ( dashboard == null ) { throw new WebApplicationException ( Response . Status . NOT_FOUND . getReasonPhrase ( ) , Response . Status . NOT_FOUND ) ; } result . add ( dashboard ) ; } else { if ( owner . isPrivileged ( ) ) { result = populateMetaFieldsOnly ? dService . findDashboards ( null , true , version ) : dService . findDashboards ( null , false , version ) ; } else { result = populateMetaFieldsOnly ? dService . findDashboardsByOwner ( owner , true , version ) : dService . findDashboardsByOwner ( owner , false , version ) ; } } return result ; |
public class ReflectionInterface { /** * Get the info for the specified method .
* @ param methodName
* The method name
* @ return The Method object
* @ throws FlickrException */
public Method getMethodInfo ( String methodName ) throws FlickrException { } } | Map < String , Object > parameters = new HashMap < String , Object > ( ) ; parameters . put ( "method" , METHOD_GET_METHOD_INFO ) ; parameters . put ( "method_name" , methodName ) ; Response response = transport . get ( transport . getPath ( ) , parameters , apiKey , sharedSecret ) ; if ( response . isError ( ) ) { throw new FlickrException ( response . getErrorCode ( ) , response . getErrorMessage ( ) ) ; } Element methodElement = response . getPayload ( ) ; Method method = new Method ( ) ; method . setName ( methodElement . getAttribute ( "name" ) ) ; method . setNeedsLogin ( "1" . equals ( methodElement . getAttribute ( "needslogin" ) ) ) ; method . setNeedsSigning ( "1" . equals ( methodElement . getAttribute ( "needssigning" ) ) ) ; String requiredPermsStr = methodElement . getAttribute ( "requiredperms" ) ; if ( requiredPermsStr != null && requiredPermsStr . length ( ) > 0 ) { try { int perms = Integer . parseInt ( requiredPermsStr ) ; method . setRequiredPerms ( perms ) ; } catch ( NumberFormatException e ) { // what shall we do ?
e . printStackTrace ( ) ; } } method . setDescription ( XMLUtilities . getChildValue ( methodElement , "description" ) ) ; method . setResponse ( XMLUtilities . getChildValue ( methodElement , "response" ) ) ; method . setExplanation ( XMLUtilities . getChildValue ( methodElement , "explanation" ) ) ; List < Argument > arguments = new ArrayList < Argument > ( ) ; Element argumentsElement = XMLUtilities . getChild ( methodElement , "arguments" ) ; // tolerant fix for incorrect nesting of the < arguments > element
// as observed in current flickr responses of this method
// specified as
// < rsp >
// < method >
// < arguments >
// < errors >
// < method >
// < / rsp >
// observed as
// < rsp >
// < method >
// < arguments >
// < errors >
// < / rsp >
if ( argumentsElement == null ) { _log . debug ( "getMethodInfo: Using workaround for arguments array" ) ; Element parent = ( Element ) methodElement . getParentNode ( ) ; Element child = XMLUtilities . getChild ( parent , "arguments" ) ; if ( child != null ) { argumentsElement = child ; } } NodeList argumentElements = argumentsElement . getElementsByTagName ( "argument" ) ; for ( int i = 0 ; i < argumentElements . getLength ( ) ; i ++ ) { Argument argument = new Argument ( ) ; Element argumentElement = ( Element ) argumentElements . item ( i ) ; argument . setName ( argumentElement . getAttribute ( "name" ) ) ; argument . setOptional ( "1" . equals ( argumentElement . getAttribute ( "optional" ) ) ) ; argument . setDescription ( XMLUtilities . getValue ( argumentElement ) ) ; arguments . add ( argument ) ; } method . setArguments ( arguments ) ; Element errorsElement = XMLUtilities . getChild ( methodElement , "errors" ) ; // tolerant fix for incorrect nesting of the < errors > element
// as observed in current flickr responses of this method
// as of 2006-09-15
// specified as
// < rsp >
// < method >
// < arguments >
// < errors >
// < method >
// < / rsp >
// observed as
// < rsp >
// < method >
// < arguments >
// < errors >
// < / rsp >
if ( errorsElement == null ) { _log . debug ( "getMethodInfo: Using workaround for errors array" ) ; Element parent = ( Element ) methodElement . getParentNode ( ) ; Element child = XMLUtilities . getChild ( parent , "errors" ) ; if ( child != null ) { errorsElement = child ; } } List < Error > errors = new ArrayList < Error > ( ) ; NodeList errorElements = errorsElement . getElementsByTagName ( "error" ) ; for ( int i = 0 ; i < errorElements . getLength ( ) ; i ++ ) { Error error = new Error ( ) ; Element errorElement = ( Element ) errorElements . item ( i ) ; error . setCode ( errorElement . getAttribute ( "code" ) ) ; error . setMessage ( errorElement . getAttribute ( "message" ) ) ; error . setExplaination ( XMLUtilities . getValue ( errorElement ) ) ; errors . add ( error ) ; } method . setErrors ( errors ) ; return method ; |
public class AudienceSegment { /** * Gets the dataProvider value for this AudienceSegment .
* @ return dataProvider * Owner data provider of this segment . This attribute is readonly
* and is assigned by Google . */
public com . google . api . ads . admanager . axis . v201811 . AudienceSegmentDataProvider getDataProvider ( ) { } } | return dataProvider ; |
public class RepositoryApi { /** * Get an archive of the complete repository by SHA ( optional ) and saves to the specified directory .
* If the archive already exists in the directory it will be overwritten .
* < pre > < code > GitLab Endpoint : GET / projects / : id / repository / archive < / code > < / pre >
* @ param projectIdOrPath the project in the form of an Integer ( ID ) , String ( path ) , or Project instance
* @ param sha the SHA of the archive to get
* @ param directory the File instance of the directory to save the archive to , if null will use " java . io . tmpdir "
* @ param format The archive format , defaults to TAR _ GZ if null
* @ return a File instance pointing to the downloaded instance
* @ throws GitLabApiException if any exception occurs */
public File getRepositoryArchive ( Object projectIdOrPath , String sha , File directory , ArchiveFormat format ) throws GitLabApiException { } } | if ( format == null ) { format = ArchiveFormat . TAR_GZ ; } /* * Gitlab - ce has a bug when you try to download file archives with format by using " & format = zip ( or tar . . . etc . ) " ,
* there is a solution to request . . . / archive . : format instead of . . . / archive ? format = : format .
* Issue : https : / / gitlab . com / gitlab - org / gitlab - ce / issues / 45992
* https : / / gitlab . com / gitlab - com / support - forum / issues / 3067 */
Form formData = new GitLabApiForm ( ) . withParam ( "sha" , sha ) ; Response response = getWithAccepts ( Response . Status . OK , formData . asMap ( ) , MediaType . MEDIA_TYPE_WILDCARD , "projects" , getProjectIdOrPath ( projectIdOrPath ) , "repository" , "archive" + "." + format . toString ( ) ) ; try { if ( directory == null ) directory = new File ( System . getProperty ( "java.io.tmpdir" ) ) ; String filename = FileUtils . getFilenameFromContentDisposition ( response ) ; File file = new File ( directory , filename ) ; InputStream in = response . readEntity ( InputStream . class ) ; Files . copy ( in , file . toPath ( ) , StandardCopyOption . REPLACE_EXISTING ) ; return ( file ) ; } catch ( IOException ioe ) { throw new GitLabApiException ( ioe ) ; } |
public class ExpressRouteCrossConnectionsInner { /** * Gets the route table summary associated with the express route cross connection in a resource group .
* @ param resourceGroupName The name of the resource group .
* @ param crossConnectionName The name of the ExpressRouteCrossConnection .
* @ param peeringName The name of the peering .
* @ param devicePath The path of the device .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < ExpressRouteCrossConnectionsRoutesTableSummaryListResultInner > beginListRoutesTableSummaryAsync ( String resourceGroupName , String crossConnectionName , String peeringName , String devicePath , final ServiceCallback < ExpressRouteCrossConnectionsRoutesTableSummaryListResultInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( beginListRoutesTableSummaryWithServiceResponseAsync ( resourceGroupName , crossConnectionName , peeringName , devicePath ) , serviceCallback ) ; |
public class FileConsumer { /** * Push message through pipe
* @ param pipe
* Pipe
* @ param message
* Message to push
* @ throws IOException
* if message could not be written */
@ SuppressWarnings ( "rawtypes" ) public void pushMessage ( IPipe pipe , IMessage message ) throws IOException { } } | if ( message instanceof RTMPMessage ) { final IRTMPEvent msg = ( ( RTMPMessage ) message ) . getBody ( ) ; // if writes are delayed , queue the data and sort it by time
if ( queue == null ) { if ( usePriority ) { if ( log . isTraceEnabled ( ) ) { log . trace ( "Creating priority typed packet queue. queueThreshold={}" , queueThreshold ) ; } // if we want ordering / comparing built - in
queue = new PriorityBlockingQueue < > ( queueThreshold <= 0 ? 240 : queueThreshold , comparator ) ; } else { if ( log . isTraceEnabled ( ) ) { log . trace ( "Creating non-priority typed packet queue" ) ; } // process as received
queue = new LinkedBlockingQueue < > ( ) ; } } if ( msg instanceof IStreamData ) { // get the type
byte dataType = msg . getDataType ( ) ; // get the timestamp
int timestamp = msg . getTimestamp ( ) ; if ( log . isTraceEnabled ( ) ) { log . trace ( "Stream data, body saved, timestamp: {} data type: {} class type: {}" , timestamp , dataType , msg . getClass ( ) . getName ( ) ) ; } // if the last message was a reset or we just started , use the header timer
if ( startTimestamp == - 1 ) { startTimestamp = timestamp ; timestamp = 0 ; } else { timestamp -= startTimestamp ; } // offer to the queue
try { QueuedMediaData queued = new QueuedMediaData ( timestamp , dataType , ( IStreamData ) msg ) ; if ( log . isTraceEnabled ( ) ) { log . trace ( "Inserting packet into queue. timestamp: {} queue size: {}, codecId={}, isConfig={}" , timestamp , queue . size ( ) , queued . codecId , queued . config ) ; } if ( queue . size ( ) > queueThreshold ) { if ( queue . size ( ) % 20 == 0 ) { log . warn ( "Queue size is greater than threshold. queue size={} threshold={}" , queue . size ( ) , queueThreshold ) ; } } if ( queue . size ( ) < 2 * queueThreshold ) { // Cap queue size to prevent a runaway stream causing OOM .
queue . offer ( queued , offerTimeout , TimeUnit . MILLISECONDS ) ; } } catch ( InterruptedException e ) { log . warn ( "Stream data was not accepted by the queue - timestamp: {} data type: {}" , timestamp , dataType , e ) ; } } // initialize a writer
if ( writer == null ) { executor . submit ( new Runnable ( ) { public void run ( ) { Thread . currentThread ( ) . setName ( "ProFileConsumer-" + path . getFileName ( ) ) ; try { if ( log . isTraceEnabled ( ) ) { log . trace ( "Running FileConsumer thread. queue size: {} initialized: {} writerNotNull={}" , queue . size ( ) , initialized , ( writer != null ) ) ; } init ( ) ; while ( writer != null ) { if ( log . isTraceEnabled ( ) ) { log . trace ( "Processing packet from queue. queue size: {}" , queue . size ( ) ) ; } try { QueuedMediaData queued = queue . take ( ) ; if ( queued != null ) { // get data type
byte dataType = queued . getDataType ( ) ; // get timestamp
int timestamp = queued . getTimestamp ( ) ; ITag tag = queued . getData ( ) ; // ensure that our first video frame written is a key frame
if ( queued . isVideo ( ) ) { if ( log . isTraceEnabled ( ) ) { log . trace ( "pushMessage video - waitForKeyframe: {} gotKeyframe: {} timestamp: {}" , waitForVideoKeyframe , gotKeyFrame , queued . getTimestamp ( ) ) ; } if ( queued . codecId == VideoCodec . AVC . getId ( ) ) { if ( queued . isConfig ( ) ) { videoConfigurationTag = tag ; gotKeyFrame = true ; } if ( videoConfigurationTag == null && waitForVideoKeyframe ) { continue ; } } else { if ( queued . frameType == VideoData . FrameType . KEYFRAME ) { gotKeyFrame = true ; } if ( waitForVideoKeyframe && ! gotKeyFrame ) { continue ; } } } else if ( queued . isAudio ( ) ) { if ( queued . isConfig ( ) ) { audioConfigurationTag = tag ; } } if ( queued . isVideo ( ) ) { if ( log . isTraceEnabled ( ) ) { log . trace ( "Writing packet. frameType={} timestamp={}" , queued . frameType , queued . getTimestamp ( ) ) ; } } // write
write ( dataType , timestamp , tag ) ; // clean up
queued . dispose ( ) ; } else { if ( log . isTraceEnabled ( ) ) { log . trace ( "Queued media is null. queue size: {}" , queue . size ( ) ) ; } } } catch ( InterruptedException e ) { log . warn ( "{}" , e . getMessage ( ) , e ) ; } // finally {
// if ( log . isTraceEnabled ( ) ) {
// log . trace ( " Clearing queue . queue size : { } " , queue . size ( ) ) ;
// queue . clear ( ) ;
} } catch ( IOException e ) { log . warn ( "{}" , e . getMessage ( ) , e ) ; } } } ) ; } } else if ( message instanceof ResetMessage ) { startTimestamp = - 1 ; } else if ( log . isDebugEnabled ( ) ) { log . debug ( "Ignoring pushed message: {}" , message ) ; } |
public class MBeanServerProxy { /** * { @ inheritDoc } */
public void addNotificationListener ( ObjectName name , NotificationListener listener , NotificationFilter filter , Object handback ) throws InstanceNotFoundException { } } | delegate . addNotificationListener ( name , listener , filter , handback ) ; |
public class ResourceChange { /** * Information about the resource changes .
* @ param details
* Information about the resource changes . */
public void setDetails ( java . util . Collection < ResourceChangeDetail > details ) { } } | if ( details == null ) { this . details = null ; return ; } this . details = new java . util . ArrayList < ResourceChangeDetail > ( details ) ; |
public class Math { /** * Returns the row sums for a matrix . */
public static double [ ] rowSums ( double [ ] [ ] data ) { } } | double [ ] x = new double [ data . length ] ; for ( int i = 0 ; i < x . length ; i ++ ) { x [ i ] = sum ( data [ i ] ) ; } return x ; |
public class MetricsImpl { /** * Retrieve metric data .
* Gets metric values for a single metric .
* @ param appId ID of the application . This is Application ID from the API Access settings blade in the Azure portal .
* @ param metricId ID of the metric . This is either a standard AI metric , or an application - specific custom metric . Possible values include : ' requests / count ' , ' requests / duration ' , ' requests / failed ' , ' users / count ' , ' users / authenticated ' , ' pageViews / count ' , ' pageViews / duration ' , ' client / processingDuration ' , ' client / receiveDuration ' , ' client / networkDuration ' , ' client / sendDuration ' , ' client / totalDuration ' , ' dependencies / count ' , ' dependencies / failed ' , ' dependencies / duration ' , ' exceptions / count ' , ' exceptions / browser ' , ' exceptions / server ' , ' sessions / count ' , ' performanceCounters / requestExecutionTime ' , ' performanceCounters / requestsPerSecond ' , ' performanceCounters / requestsInQueue ' , ' performanceCounters / memoryAvailableBytes ' , ' performanceCounters / exceptionsPerSecond ' , ' performanceCounters / processCpuPercentage ' , ' performanceCounters / processIOBytesPerSecond ' , ' performanceCounters / processPrivateBytes ' , ' performanceCounters / processorCpuPercentage ' , ' availabilityResults / availabilityPercentage ' , ' availabilityResults / duration ' , ' billing / telemetryCount ' , ' customEvents / count '
    /**
     * Gets metric values for a single metric, blocking until the service call
     * completes and returning the response body.
     *
     * @param appId the application id (assumed: the Application Insights application GUID — confirm against the service client)
     * @param metricId ID of the metric to retrieve (assumed — confirm against the MetricId enum)
     * @param timespan The timespan over which to retrieve metric values. This is an ISO8601 time period value. If timespan is omitted, a default time range of `PT12H` ("last 12 hours") is used. The actual timespan that is queried may be adjusted by the server. In all cases, the actual time span used for the query is included in the response.
     * @param interval The time interval to use when retrieving metric values. This is an ISO8601 duration. If interval is omitted, the metric value is aggregated across the entire timespan. If interval is supplied, the server may adjust the interval to a more appropriate size based on the timespan used for the query. In all cases, the actual interval used for the query is included in the response.
     * @param aggregation The aggregation to use when computing the metric values. To retrieve more than one aggregation at a time, separate them with a comma. If no aggregation is specified, then the default aggregation for the metric is used.
     * @param segment The name of the dimension to segment the metric values by. This dimension must be applicable to the metric you are retrieving. To segment by more than one dimension at a time, separate them with a comma (,). In this case, the metric data will be segmented in the order the dimensions are listed in the parameter.
     * @param top The number of segments to return. This value is only valid when segment is specified.
     * @param orderby The aggregation function and direction to sort the segments by. This value is only valid when segment is specified.
     * @param filter An expression used to filter the results. This value should be a valid OData filter expression where the keys of each clause should be applicable dimensions for the metric you are retrieving.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the MetricsResult object if successful.
     */
    public MetricsResult get(String appId, MetricId metricId, String timespan, Period interval, List<MetricsAggregation> aggregation, List<MetricsSegment> segment, Integer top, String orderby, String filter) {
        // Delegate to the async variant and block for the single emitted response body.
        return getWithServiceResponseAsync(appId, metricId, timespan, interval, aggregation, segment, top, orderby, filter).toBlocking().single().body();
    }
public class CmsDirectEditEntryPoint {

    /**
     * Initializes the direct edit buttons.<p>
     *
     * Collects all DOM elements marked as editable, records their on-screen
     * positions, computes non-overlapping positions for the button bars, then
     * creates the button widgets and re-positions them on window resize.
     */
    public void initializeButtons() {
        // Find every div carrying the "editable" marker class.
        List<Element> editableElements = CmsDomUtil.getElementsByClass(CmsGwtConstants.CLASS_EDITABLE, Tag.div);
        List<CmsDirectEditButtons> editables = Lists.newArrayList();
        // First pass: record the position of each editable element.
        for (Element elem : editableElements) {
            CmsPositionBean pos = CmsDomUtil.getEditablePosition(elem);
            m_positions.put(elem.getId(), pos);
        }
        // Derive button-bar positions from the collected element positions so
        // the bars do not overlap each other.
        CmsEditablePositionCalculator posCalc = new CmsEditablePositionCalculator(m_positions);
        m_buttonPositions = posCalc.calculatePositions();
        // Second pass: create the button bar widget for each element.
        for (Element elem : editableElements) {
            CmsDirectEditButtons directEdit = processEditableElement(elem);
            m_directEditButtons.put(elem.getId(), directEdit);
            editables.add(directEdit);
        }
        // Keep the buttons aligned with their elements when the window resizes.
        Window.addResizeHandler(new ResizeHandler() {

            public void onResize(ResizeEvent event) {
                repositionButtons();
            }
        });
    }
}
public class DragItemRecyclerView { /** * Returns the child view under the specific x , y coordinate .
* This method will take margins of the child into account when finding it . */
public View findChildView ( float x , float y ) { } } | final int count = getChildCount ( ) ; if ( y <= 0 && count > 0 ) { return getChildAt ( 0 ) ; } for ( int i = count - 1 ; i >= 0 ; i -- ) { final View child = getChildAt ( i ) ; MarginLayoutParams params = ( MarginLayoutParams ) child . getLayoutParams ( ) ; if ( x >= child . getLeft ( ) - params . leftMargin && x <= child . getRight ( ) + params . rightMargin && y >= child . getTop ( ) - params . topMargin && y <= child . getBottom ( ) + params . bottomMargin ) { return child ; } } return null ; |
public class WTextField {

    /**
     * Override validateComponent to perform further validation: maximum
     * length, minimum length and pattern matching, in that order. Validation
     * is skipped entirely for empty values.
     *
     * @param diags the list into which any validation diagnostics are added.
     */
    @Override
    protected void validateComponent(final List<Diagnostic> diags) {
        super.validateComponent(diags);
        if (!isEmpty()) {
            String value = getValueAsString();
            // Maximum Length (only enforced when a positive max is configured)
            int max = getMaxLength();
            if (max > 0 && value.length() > max) {
                diags.add(createErrorDiagnostic(InternalMessages.DEFAULT_VALIDATION_ERROR_MAX_LENGTH, this, String.valueOf(max)));
            }
            // Minimum Length (only enforced when a positive min is configured)
            int min = getMinLength();
            if (min > 0 && value.length() < min) {
                diags.add(createErrorDiagnostic(InternalMessages.DEFAULT_VALIDATION_ERROR_MIN_LENGTH, this, String.valueOf(min)));
            }
            // Pattern (only checked when one has been set on the model)
            Pattern pattern = getComponentModel().pattern;
            if (pattern != null) {
                Matcher matcher = pattern.matcher(value);
                if (!matcher.matches()) {
                    diags.add(createErrorDiagnostic(InternalMessages.DEFAULT_VALIDATION_ERROR_INVALID_PATTERN, this));
                }
            }
        }
    }
}
public class RelaxedNames { /** * Return a { @ link RelaxedNames } for the given source camelCase source name .
* @ param name the source name in camelCase
* @ return the relaxed names */
public static RelaxedNames forCamelCase ( String name ) { } } | StringBuilder result = new StringBuilder ( ) ; for ( char c : name . toCharArray ( ) ) { result . append ( Character . isUpperCase ( c ) && result . length ( ) > 0 && result . charAt ( result . length ( ) - 1 ) != '-' ? "-" + Character . toLowerCase ( c ) : c ) ; } return new RelaxedNames ( result . toString ( ) ) ; |
public class EntityPropertyDescFactory {

    /**
     * Resolves and assigns the Java class name for an entity property.
     * (Translated from Japanese: "processes the property class name".)
     *
     * @param entityDesc the entity description
     * @param propertyDesc the entity property description to update
     * @param columnMeta the column metadata the property is mapped from
     */
    protected void handlePropertyClassName(EntityDesc entityDesc, EntityPropertyDesc propertyDesc, ColumnMeta columnMeta) {
        // Start from the dialect's default mapping for the column type, then
        // allow the resolver to override it.
        String defaultClassName = dialect.getMappedPropertyClassName(columnMeta);
        String className = propertyClassNameResolver.resolve(entityDesc, propertyDesc.getName(), defaultClassName);
        if (className == null) {
            // No mapping could be resolved: log the unmapped column and fall
            // back to java.lang.String.
            Logger.info(Message.DOMAGEN0018.getMessage(columnMeta.getTableMeta().getName(), columnMeta.getName(), columnMeta.getTypeName(), columnMeta.getSqlType()));
            className = String.class.getName();
        }
        propertyDesc.setPropertyClassName(className);
    }
}
public class TagUtils { /** * String - - > boolean
* @ param value
* @ return */
public static boolean getBoolean ( Object value ) { } } | if ( value == null ) { return false ; } if ( value instanceof Boolean ) { return ( ( Boolean ) value ) . booleanValue ( ) ; } return Boolean . valueOf ( value . toString ( ) ) . booleanValue ( ) ; |
public class QQPlot { /** * Generate the quantile - quantile pairs . */
private static double [ ] [ ] quantile ( double [ ] x , Distribution d ) { } } | Arrays . sort ( x ) ; int n = x . length ; double [ ] [ ] q = new double [ n ] [ 2 ] ; for ( int i = 0 ; i < n ; i ++ ) { double p = ( i + 1 ) / ( n + 1.0 ) ; q [ i ] [ 0 ] = x [ ( int ) Math . round ( p * x . length ) ] ; q [ i ] [ 1 ] = d . quantile ( p ) ; } return q ; |
public class XpathUtils { /** * Same as { @ link # asNode ( String , Node ) } but allows an xpath to be
* passed in explicitly for reuse . */
public static Node asNode ( String nodeName , Node node , XPath xpath ) throws XPathExpressionException { } } | if ( node == null ) return null ; return ( Node ) xpath . evaluate ( nodeName , node , XPathConstants . NODE ) ; |
public class CmsListManager {

    /**
     * Edits the given list configuration resource.<p>
     *
     * Opens the content editor for the resource; when the resource is null, a
     * new list configuration is created and edited instead.
     *
     * @param resource the configuration resource, or null to create a new one
     */
    void editListConfiguration(CmsResource resource) {
        try {
            CmsObject cms = A_CmsUI.getCmsObject();
            String editState;
            if (resource == null) {
                // No resource given: open the editor in "create new" mode.
                editState = CmsEditor.getEditStateForNew(cms, OpenCms.getResourceManager().getResourceType(RES_TYPE_LIST_CONFIG), "/", null, false, UI.getCurrent().getPage().getLocation().toString());
            } else {
                editState = CmsEditor.getEditState(resource.getStructureId(), false, UI.getCurrent().getPage().getLocation().toString());
            }
            View view = CmsAppWorkplaceUi.get().getCurrentView();
            if (view instanceof CmsAppView) {
                // Cache the current view once so it can be restored after editing.
                ((CmsAppView) view).setCacheStatus(CacheStatus.cacheOnce);
            }
            CmsAppWorkplaceUi.get().showApp(OpenCms.getWorkplaceAppManager().getAppConfiguration(CmsEditorConfiguration.APP_ID), editState);
        } catch (CmsLoaderException e) {
            // The list configuration resource type could not be loaded.
            CmsErrorDialog.showErrorDialog(e);
        }
    }
}
public class ComputePoliciesInner {

    /**
     * Lists the Data Lake Analytics compute policies within the specified Data
     * Lake Analytics account. An account supports, at most, 50 policies.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;ComputePolicyInner&gt; object
     */
    public Observable<ServiceResponse<Page<ComputePolicyInner>>> listByAccountNextWithServiceResponseAsync(final String nextPageLink) {
        // Fetch one page, then recursively concatenate the remaining pages
        // until a page reports no further NextLink.
        return listByAccountNextSinglePageAsync(nextPageLink)
            .concatMap(new Func1<ServiceResponse<Page<ComputePolicyInner>>, Observable<ServiceResponse<Page<ComputePolicyInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<ComputePolicyInner>>> call(ServiceResponse<Page<ComputePolicyInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        // Last page: stop the recursion.
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listByAccountNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }
}
public class AndroidUtil {

    /**
     * Utility function to create a two-level tile cache with the right size,
     * using the size of the map view. This is the compatibility version that
     * by default creates a non-persistent cache.
     *
     * @param c the Android context
     * @param id name for the storage directory
     * @param tileSize tile size
     * @param width the width of the map view
     * @param height the height of the map view
     * @param overdraw overdraw allowance
     * @return a new cache created on the external storage
     */
    public static TileCache createTileCache(Context c, String id, int tileSize, int width, int height, double overdraw) {
        // Delegate to the full overload with persistence disabled.
        return createTileCache(c, id, tileSize, width, height, overdraw, false);
    }
}
public class SemanticPropUtil {

    /**
     * Creates SemanticProperties by adding an offset to each input field index
     * of the given SemanticProperties.
     *
     * @param props The SemanticProperties to which the offset is added.
     * @param numInputFields The original number of fields of the input.
     * @param offset The offset that is added to each input field index.
     * @return New SemanticProperties with added offset.
     */
    public static SingleInputSemanticProperties addSourceFieldOffset(SingleInputSemanticProperties props, int numInputFields, int offset) {
        SingleInputSemanticProperties offsetProps = new SingleInputSemanticProperties();
        // Shift the read-field set, if one was declared.
        if (props.getReadFields(0) != null) {
            FieldSet offsetReadFields = new FieldSet();
            for (int r : props.getReadFields(0)) {
                offsetReadFields = offsetReadFields.addField(r + offset);
            }
            offsetProps.addReadFields(offsetReadFields);
        }
        // Shift every forwarded-field mapping: the source index moves by
        // offset, target indices are left unchanged.
        for (int s = 0; s < numInputFields; s++) {
            FieldSet targetFields = props.getForwardingTargetFields(0, s);
            for (int t : targetFields) {
                offsetProps.addForwardedField(s + offset, t);
            }
        }
        return offsetProps;
    }
}
public class BoxRequestUpdateSharedItem {

    /**
     * Sets whether the shared link allows downloads in the request.
     *
     * @param canDownload new value for whether the shared link allows downloads.
     * @return request with the updated value for whether the shared link allows downloads.
     */
    protected R setCanDownload(boolean canDownload) {
        // Rebuild the permissions object with the new can-download flag ...
        JsonObject jsonPermissionsObject = getPermissionsJsonObject();
        jsonPermissionsObject.add(BoxSharedLink.Permissions.FIELD_CAN_DOWNLOAD, canDownload);
        BoxSharedLink.Permissions permissions = new BoxSharedLink.Permissions(jsonPermissionsObject);
        // ... splice it into the shared-link object ...
        JsonObject sharedLinkJsonObject = getSharedLinkJsonObject();
        sharedLinkJsonObject.add(BoxSharedLink.FIELD_PERMISSIONS, permissions.toJsonObject());
        BoxSharedLink sharedLink = new BoxSharedLink(sharedLinkJsonObject);
        // ... and store the updated shared link in the request body.
        mBodyMap.put(BoxItem.FIELD_SHARED_LINK, sharedLink);
        return (R) this;
    }
}
public class GraphicalTerminalImplementation {

    /**
     * Updates the back buffer (if necessary) and draws it to the component's surface.
     *
     * @param componentGraphics Object to use when drawing to the component's surface
     */
    synchronized void paintComponent(Graphics componentGraphics) {
        int width = getWidth();
        int height = getHeight();
        this.scrollController.updateModel(virtualTerminal.getBufferLineCount() * getFontHeight(), height);
        boolean needToUpdateBackBuffer =
                // User has used the scrollbar, we need to update the back buffer to reflect this
                lastBufferUpdateScrollPosition != scrollController.getScrollingOffset() ||
                // There is blinking text to update
                hasBlinkingText ||
                // We simply have a hint that we should update everything
                needFullRedraw;
        // Detect resize: recompute the terminal's column/row count from the
        // new pixel size and the font metrics.
        if (width != lastComponentWidth || height != lastComponentHeight) {
            int columns = width / getFontWidth();
            int rows = height / getFontHeight();
            TerminalSize terminalSize = virtualTerminal.getTerminalSize().withColumns(columns).withRows(rows);
            virtualTerminal.setTerminalSize(terminalSize);
            // Back buffer needs to be updated since the component size has changed
            needToUpdateBackBuffer = true;
        }
        if (needToUpdateBackBuffer) {
            updateBackBuffer(scrollController.getScrollingOffset());
        }
        ensureGraphicBufferHasRightSize();
        Rectangle clipBounds = componentGraphics.getClipBounds();
        if (clipBounds == null) {
            // No clip set: repaint the whole component area.
            clipBounds = new Rectangle(0, 0, getWidth(), getHeight());
        }
        componentGraphics.drawImage(backbuffer,
                // Destination coordinates
                clipBounds.x, clipBounds.y, clipBounds.width, clipBounds.height,
                // Source coordinates
                clipBounds.x, clipBounds.y, clipBounds.width, clipBounds.height, null);
        // Take care of the left-over area at the bottom and right of the component where no character can fit
        // int leftoverHeight = getHeight() % getFontHeight();
        int leftoverWidth = getWidth() % getFontWidth();
        componentGraphics.setColor(Color.BLACK);
        if (leftoverWidth > 0) {
            componentGraphics.fillRect(getWidth() - leftoverWidth, 0, leftoverWidth, getHeight());
        }
        // 0, 0, getWidth(), getHeight(), 0, 0, getWidth(), getHeight(), null);
        this.lastComponentWidth = width;
        this.lastComponentHeight = height;
        componentGraphics.dispose();
        // Wake any thread blocked on this monitor (presumably waiting for a
        // repaint to complete — TODO confirm against the waiting side).
        notifyAll();
    }
}
public class OstrichOwnerGroup {

    /**
     * Returns the specified managed service if this server is responsible for
     * the specified object and has won a ZooKeeper-managed leader election.
     *
     * @param name object name. Whether this server owns the object is computed by Ostrich using consistent hashing.
     * @param waitDuration the amount of time to wait for this server to win the leader election and for the service
     *        to startup, if the object is managed by this server.
     */
    @Nullable
    @Override
    public T startIfOwner(String name, Duration waitDuration) {
        long timeoutAt = System.currentTimeMillis() + waitDuration.toMillis();
        LeaderService leaderService = _leaderMap.getUnchecked(name).orNull();
        if (leaderService == null || !awaitRunning(leaderService, timeoutAt)) {
            // Either this server does not manage the object, or the leader
            // service did not reach RUNNING within the deadline.
            return null;
        }
        Service service;
        for (;;) {
            // Poll (10ms intervals) until the leader election yields a
            // delegate service or the deadline passes.
            Optional<Service> opt = leaderService.getCurrentDelegateService();
            if (opt.isPresent()) {
                service = opt.get();
                break;
            }
            if (System.currentTimeMillis() >= timeoutAt) {
                return null;
            }
            try {
                Thread.sleep(10);
            } catch (InterruptedException e) {
                throw Throwables.propagate(e);
            }
        }
        // The delegate itself must also start within the remaining time.
        if (!awaitRunning(service, timeoutAt)) {
            return null;
        }
        // noinspection unchecked
        return (T) service;
    }
}
public class CmsPropertyResourceComparator {

    /**
     * Initializes the comparator key based on the member variables.<p>
     *
     * @param resource the resource to use
     * @param cms the current OpenCms user context
     * @param property the name of the sort property (case sensitive)
     */
    private void init(CmsResource resource, CmsObject cms, String property) {
        try {
            // Throws CmsDbEntryNotFoundException when the property is not
            // defined at all; translated into an argument error below.
            cms.readPropertyDefinition(property);
            CmsProperty prop = cms.readPropertyObject(resource, property, false);
            if (prop == CmsProperty.getNullProperty()) {
                // Identity comparison is intentional here: getNullProperty()
                // is used as a shared "no value" marker.
                m_propertyValue = "";
            } else {
                m_propertyValue = prop.getValue();
            }
        } catch (CmsDbEntryNotFoundException dbe) {
            // property is not configured
            throw new CmsIllegalArgumentException(Messages.get().container(Messages.ERR_COLLECTOR_PARAM_PROPERTY_NOT_FOUND_1, property));
        } catch (CmsException cmse) {
            // something's gone wrong...
            // NOTE(review): only prints the stack trace and leaves
            // m_propertyValue unset — consider routing through a logger.
            cmse.printStackTrace();
        }
    }
}
public class RuleSessionImpl {

    /**
     * Assigns the given value to every proxy field in the list.
     *
     * @see nz.co.senanque.rules.RuleSession#assign(nz.co.senanque.rules.RuleContext, java.lang.Object, java.util.List)
     */
    public void assign(RuleContext ruleContext, Object value, List<ProxyField> list) {
        for (ProxyField proxyField : list) {
            // Delegates to the internal overload; the trailing 'true' flag's
            // meaning is not visible here — confirm against that overload.
            assign(getRuleProxyField(proxyField), value, ruleContext, true);
        }
    }
}
public class LockedObject {

    /**
     * Deletes this lock object. Assumes that it has no children and no owners
     * (does not check this itself).
     */
    public void removeLockedObject() {
        // The root lock ("/") must never be removed.
        if (this != resourceLocks.root && !this.getPath().equals("/")) {
            int size = parent.children.length;
            // Remove this node from the parent's children by copying every
            // other entry into a new, one-element-shorter array.
            for (int i = 0; i < size; i++) {
                if (parent.children[i].equals(this)) {
                    LockedObject[] newChildren = new LockedObject[size - 1];
                    for (int i2 = 0; i2 < (size - 1); i2++) {
                        if (i2 < i) {
                            newChildren[i2] = parent.children[i2];
                        } else {
                            // Skip over the removed element.
                            newChildren[i2] = parent.children[i2 + 1];
                        }
                    }
                    if (newChildren.length != 0) {
                        parent.children = newChildren;
                    } else {
                        // Last child removed: "no children" is represented as
                        // null here (not as an empty array).
                        parent.children = null;
                    }
                    break;
                }
            }
            // removing from hashtable
            resourceLocks.locksByID.remove(getID());
            resourceLocks.locks.remove(getPath());
            // now the garbage collector has some work to do
        }
    }
}
public class TopNBuffer { /** * Gets record which rank is given value .
* @ param rank rank value to search
* @ return the record which rank is given value */
BaseRow getElement ( int rank ) { } } | int curRank = 0 ; Iterator < Map . Entry < BaseRow , Collection < BaseRow > > > iter = treeMap . entrySet ( ) . iterator ( ) ; while ( iter . hasNext ( ) ) { Map . Entry < BaseRow , Collection < BaseRow > > entry = iter . next ( ) ; Collection < BaseRow > list = entry . getValue ( ) ; Iterator < BaseRow > listIter = list . iterator ( ) ; while ( listIter . hasNext ( ) ) { BaseRow elem = listIter . next ( ) ; curRank += 1 ; if ( curRank == rank ) { return elem ; } } } return null ; |
public class CORSConfigBuilder { /** * The origin values that CORS requests are allowed for , or null to allow all origins .
* @ param allowedOrigins Allowed origins , such as < code > https : / / example . org < / code > or < code > http : / / localhost : 8080 < / code >
* @ return This builder */
public CORSConfigBuilder withAllowedOrigins ( Collection < String > allowedOrigins ) { } } | if ( allowedOrigins != null ) { for ( String allowedOrigin : allowedOrigins ) { if ( ! allowedOrigin . startsWith ( "http://" ) && ! allowedOrigin . startsWith ( "https://" ) ) { throw new IllegalArgumentException ( allowedOrigin + " is invalid: origins much have an http:// or https:// prefix" ) ; } if ( allowedOrigin . lastIndexOf ( '/' ) > 8 ) { throw new IllegalArgumentException ( allowedOrigin + " is invalid: origins should not have any paths. Example origin: https://example.org" ) ; } } } this . allowedOrigins = allowedOrigins ; return this ; |
public class DummyFileObjectStore {

    /**
     * Captures the statistics of the superclass store; this dummy store adds
     * nothing of its own beyond entry/exit tracing.
     *
     * @see com.ibm.ws.objectManager.ObjectStore#captureStatistics()
     */
    public java.util.Map captureStatistics() throws ObjectManagerException {
        if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
            trace.entry(this, cclass, "captureStatistics");
        java.util.Map statistics = super.captureStatistics();
        if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
            trace.exit(this, cclass, "captureStatistics", "return statistics=" + statistics);
        return statistics;
    }
}
public class FlowTypeCheck { /** * Extract the element type from an array . The array type can be null if some
* earlier part of type checking generated an error message and we are just
* continuing after that .
* @ param type
* @ param item
* @ return */
public SemanticType extractElementType ( SemanticType . Array type , SyntacticItem item ) { } } | if ( type == null ) { return null ; } else { return type . getElement ( ) ; } |
public class Hessian2Output {

    /**
     * Writes a date to the stream.
     *
     * <code><pre>
     * date ::= d b7 b6 b5 b4 b3 b2 b1 b0
     *      ::= x65 b3 b2 b1 b0
     * </pre></code>
     *
     * @param time the date in milliseconds from the epoch in UTC
     */
    public void writeUTCDate(long time) throws IOException {
        // Ensure the worst-case 9 bytes fit in the write buffer.
        if (SIZE < _offset + 32)
            flushBuffer();
        int offset = _offset;
        byte[] buffer = _buffer;
        if (time % 60000L == 0) {
            // compact date ::= x65 b3 b2 b1 b0
            long minutes = time / 60000L;
            if ((minutes >> 31) == 0 || (minutes >> 31) == -1) {
                // The minute count fits in a signed 32-bit value, so the
                // 5-byte compact encoding applies.
                buffer[offset++] = (byte) BC_DATE_MINUTE;
                buffer[offset++] = ((byte) (minutes >> 24));
                buffer[offset++] = ((byte) (minutes >> 16));
                buffer[offset++] = ((byte) (minutes >> 8));
                buffer[offset++] = ((byte) (minutes >> 0));
                _offset = offset;
                return;
            }
        }
        // Full encoding: marker byte plus 8 big-endian millisecond bytes.
        buffer[offset++] = (byte) BC_DATE;
        buffer[offset++] = ((byte) (time >> 56));
        buffer[offset++] = ((byte) (time >> 48));
        buffer[offset++] = ((byte) (time >> 40));
        buffer[offset++] = ((byte) (time >> 32));
        buffer[offset++] = ((byte) (time >> 24));
        buffer[offset++] = ((byte) (time >> 16));
        buffer[offset++] = ((byte) (time >> 8));
        buffer[offset++] = ((byte) (time));
        _offset = offset;
    }
}
public class PropagateRequestImpl {

    /**
     * Creates the SmartFox parameter object used to respond to the client.
     *
     * @return SmartFox parameter object
     */
    private ISFSObject createResponseParams() {
        // Assemble the response from the request's addition, the variable
        // include/exclude filters and the payload data, transforming the
        // parameters for the current context.
        return ResponseParamsBuilder.create()
            .addition(addition)
            .excludedVars(excludedVars)
            .includedVars(includedVars)
            .transformer(new ParamTransformer(context))
            .data(data)
            .build();
    }
}
public class DoubleTuples {

    /**
     * Computes <code>t0 + factor * t1</code>, and stores the result in the
     * given result tuple.
     *
     * @param t0 The first input tuple
     * @param factor The scaling factor
     * @param t1 The second input tuple
     * @param result The tuple that will store the result
     * @return The result tuple
     * @throws IllegalArgumentException If the given tuples do not
     *         have the same {@link Tuple#getSize() size}
     */
    public static MutableDoubleTuple addScaled(DoubleTuple t0, double factor, DoubleTuple t1, MutableDoubleTuple result) {
        // Element-wise: result[i] = t0[i] + factor * t1[i]
        return DoubleTupleFunctions.apply(t0, t1, (a, b) -> (a + factor * b), result);
    }
}
public class LoggingConfigUtils { /** * Read integer value from properties : begin by preserving the old value . If
* the property is found , and the new value is an integer , the new value
* will be returned .
* @ param newValue
* New parameter value to parse / evaluate
* @ param defaultValue
* Starting / Previous value
* @ return defaultValue if the newValue is null or is was badly
* formatted , or the converted new value */
public static int getIntValue ( Object newValue , int defaultValue ) { } } | if ( newValue != null ) { if ( newValue instanceof String ) { try { return Integer . parseInt ( ( String ) newValue ) ; } catch ( NumberFormatException ex ) { } } else if ( newValue instanceof Integer ) return ( Integer ) newValue ; } return defaultValue ; |
public class SoftHashMap {

    /**
     * {@inheritDoc}
     *
     * Stores the value wrapped in a {@code SoftValue} so the garbage collector
     * may reclaim it under memory pressure; the reference queue is drained
     * first to evict already-collected entries.
     *
     * NOTE(review): this implementation always returns null, even when a
     * previous mapping existed — that deviates from the {@code Map.put}
     * contract; confirm no caller relies on the returned previous value.
     */
    @Override
    public final V put(final K key, final V value) {
        // Evict entries whose referents have already been collected.
        processQueue();
        internalMap.put(key, new SoftValue<V>(value, key, queue));
        return null;
    }
}
public class CollectionUtils { /** * 返回第一个列表中比第二个多出来的元素 */
public static < T > List < T > getLeftDiff ( List < T > list1 , List < T > list2 ) { } } | if ( isEmpty ( list2 ) ) { return list1 ; } List < T > list = new ArrayList < T > ( ) ; if ( isNotEmpty ( list1 ) ) { for ( T o : list1 ) { if ( ! list2 . contains ( o ) ) { list . add ( o ) ; } } } return list ; |
public class RawJsonDocument {

    /**
     * Creates a {@link RawJsonDocument} with the document id, JSON content and
     * the expiration time.
     *
     * @param id the per-bucket unique document id.
     * @param expiry the expiration time of the document.
     * @param content the content of the document.
     * @return a {@link RawJsonDocument}.
     */
    public static RawJsonDocument create(String id, int expiry, String content) {
        // New documents carry a CAS of 0 and no mutation token.
        return new RawJsonDocument(id, expiry, content, 0, null);
    }
}
public class SerializerIntrinsics {

    /**
     * Emits the POPCNT instruction — Return the Count of Number of Bits Set
     * to 1 (SSE4.2) — with a register destination and a memory source.
     *
     * @param dst destination general-purpose register; must not be an 8-bit register
     * @param src memory operand holding the value whose set bits are counted
     */
    public final void popcnt(Register dst, Mem src) {
        // POPCNT has no 8-bit register form.
        assert (!dst.isRegType(REG_GPB));
        emitX86(INST_POPCNT, dst, src);
    }
}
public class RdfConverter {

    /**
     * Writes triples which connect properties with their corresponding RDF
     * properties for statements, simple statements, qualifiers, reference
     * attributes and values.
     *
     * @param document the property document to link
     * @throws RDFHandlerException if the RDF writer fails
     */
    void writeInterPropertyLinks(PropertyDocument document) throws RDFHandlerException {
        Resource subject = this.rdfWriter.getUri(document.getEntityId().getIri());
        // One triple per property context, linking the property entity to the
        // context-specific RDF property URI.
        this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter.getUri(Vocabulary.WB_DIRECT_CLAIM_PROP), Vocabulary.getPropertyUri(document.getEntityId(), PropertyContext.DIRECT));
        this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter.getUri(Vocabulary.WB_CLAIM_PROP), Vocabulary.getPropertyUri(document.getEntityId(), PropertyContext.STATEMENT));
        this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter.getUri(Vocabulary.WB_STATEMENT_PROP), Vocabulary.getPropertyUri(document.getEntityId(), PropertyContext.VALUE_SIMPLE));
        this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter.getUri(Vocabulary.WB_STATEMENT_VALUE_PROP), Vocabulary.getPropertyUri(document.getEntityId(), PropertyContext.VALUE));
        this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter.getUri(Vocabulary.WB_QUALIFIER_PROP), Vocabulary.getPropertyUri(document.getEntityId(), PropertyContext.QUALIFIER_SIMPLE));
        this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter.getUri(Vocabulary.WB_QUALIFIER_VALUE_PROP), Vocabulary.getPropertyUri(document.getEntityId(), PropertyContext.QUALIFIER));
        this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter.getUri(Vocabulary.WB_REFERENCE_PROP), Vocabulary.getPropertyUri(document.getEntityId(), PropertyContext.REFERENCE_SIMPLE));
        this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter.getUri(Vocabulary.WB_REFERENCE_VALUE_PROP), Vocabulary.getPropertyUri(document.getEntityId(), PropertyContext.REFERENCE));
        this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter.getUri(Vocabulary.WB_NO_VALUE_PROP), Vocabulary.getPropertyUri(document.getEntityId(), PropertyContext.NO_VALUE));
        this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter.getUri(Vocabulary.WB_NO_QUALIFIER_VALUE_PROP), Vocabulary.getPropertyUri(document.getEntityId(), PropertyContext.NO_QUALIFIER_VALUE));
        // TODO something more with NO_VALUE
    }
}
public class Misc { /** * Check that a parameter is not null and throw IllegalArgumentException with a message of
* errorMessagePrefix + " must not be null . " if it is null , defaulting to " Parameter must not be
* null . " .
* @ param param the parameter to check
* @ param errorMessagePrefix the prefix of the error message to use for the
* IllegalArgumentException if the parameter was null
* @ throws IllegalArgumentException if the parameter was { @ code null } */
public static void checkNotNull ( Object param , String errorMessagePrefix ) throws IllegalArgumentException { } } | checkArgument ( param != null , ( errorMessagePrefix != null ? errorMessagePrefix : "Parameter" ) + " must not be null." ) ; |
public class JwtIssuer {

    /**
     * Constructs a default JwtClaims populated from the static jwtConfig:
     * issuer, audience, expiry, a generated unique token id, issue time,
     * not-before time and the token version claim.
     *
     * @return JwtClaims
     */
    public static JwtClaims getDefaultJwtClaims() {
        JwtClaims claims = new JwtClaims();
        claims.setIssuer(jwtConfig.getIssuer());
        claims.setAudience(jwtConfig.getAudience());
        claims.setExpirationTimeMinutesInTheFuture(jwtConfig.getExpiredInMinutes());
        claims.setGeneratedJwtId(); // a unique identifier for the token
        claims.setIssuedAtToNow(); // when the token was issued/created (now)
        claims.setNotBeforeMinutesInThePast(2); // time before which the token is not yet valid (2 minutes ago)
        claims.setClaim("version", jwtConfig.getVersion());
        return claims;
    }
}
public class HostNameValidator {

    /**
     * Validates a host name, optionally allowing a ":port" suffix on the last
     * label; any problem found is reported and false returned.
     *
     * @see com.fs.commons.desktop.validation.Validator#validate(com.fs.commons.desktop.validation.Problems, java.lang.String, java.lang.Object)
     */
    @Override
    public boolean validate(final Problems problems, final String compName, final String model) {
        // Empty input is not a valid host name.
        if (model.length() == 0) {
            problems.add(ValidationBundle.getMessage(HostNameValidator.class, "INVALID_HOST_NAME", compName, model)); // NOI18N
            return false;
        }
        // A host name may not start or end with a period.
        if (model.startsWith(".") || model.endsWith(".")) { // NOI18N
            problems.add(ValidationBundle.getMessage(IpAddressValidator.class, "HOST_STARTS_OR_ENDS_WITH_PERIOD", model)); // NOI18N
            return false;
        }
        final String[] parts = model.split("\\.");
        // NOTE(review): the 4-label cap matches dotted-quad IP addresses but
        // rejects legal DNS names with more labels — confirm intended scope.
        if (parts.length > 4) {
            problems.add(ValidationBundle.getMessage(IpAddressValidator.class, "TOO_MANY_LABELS", model)); // NOI18N
            return false;
        }
        // Reject any port suffix when ports are not allowed.
        if (!this.allowPort && model.contains(":")) { // NOI18N
            problems.add(ValidationBundle.getMessage(HostNameValidator.class, "MSG_PORT_NOT_ALLOWED", compName, model)); // NOI18N
            return false;
        }
        final boolean result = new MayNotContainSpacesValidator().validate(problems, compName, model);
        if (!result) {
            return false;
        }
        // Hyphens are allowed inside a name, but not at either end.
        if (model.endsWith("-") || model.startsWith("-")) {
            problems.add(ValidationBundle.getMessage(HostNameValidator.class, "INVALID_HOST_NAME", compName, model)); // NOI18N
            return false;
        }
        for (int i = 0; i < parts.length; i++) {
            final String label = parts[i];
            // DNS labels are limited to 63 characters.
            if (label.length() > 63) {
                problems.add(ValidationBundle.getMessage(HostNameValidator.class, "LABEL_TOO_LONG", label)); // NOI18N
                return false;
            }
            // Only the last label may carry a ":port" suffix.
            if (i == parts.length - 1 && label.indexOf(":") > 0) {
                final String[] labelAndPort = label.split(":");
                if (labelAndPort.length > 2) {
                    problems.add(ValidationBundle.getMessage(HostNameValidator.class, "INVALID_PORT", compName, label)); // NOI18N
                    return false;
                }
                if (labelAndPort.length == 1) {
                    // A colon with nothing after it, e.g. "host:".
                    problems.add(ValidationBundle.getMessage(HostNameValidator.class, "INVALID_PORT", compName, "''")); // NOI18N
                    return false;
                } else {
                    if (label.endsWith(":")) {
                        problems.add(ValidationBundle.getMessage(HostNameValidator.class, "TOO_MANY_COLONS", compName, label)); // NOI18N
                        return false;
                    }
                    try {
                        final int port = Integer.parseInt(labelAndPort[1]);
                        if (port < 0) {
                            problems.add(ValidationBundle.getMessage(IpAddressValidator.class, "NEGATIVE_PORT", port)); // NOI18N
                            return false;
                        } else if (port >= 65536) {
                            problems.add(ValidationBundle.getMessage(IpAddressValidator.class, "PORT_TOO_HIGH", port)); // NOI18N
                            return false;
                        }
                    } catch (final NumberFormatException e) {
                        problems.add(ValidationBundle.getMessage(HostNameValidator.class, "INVALID_PORT", compName, labelAndPort[1])); // NOI18N
                        return false;
                    }
                    if (!checkHostPart(labelAndPort[0], problems, compName)) {
                        return false;
                    }
                }
            } else {
                if (!checkHostPart(label, problems, compName)) {
                    return false;
                }
            }
        }
        return true;
    }
}
public class BitmapUtils {

    /**
     * Get width and height of the bitmap specified with the {@link android.net.Uri}.
     * Only the image bounds are decoded, so no pixel data is ever allocated.
     *
     * @param resolver the resolver.
     * @param uri the uri that points to the bitmap.
     * @return the size, or null when the target file does not exist.
     */
    public static Point getSize(ContentResolver resolver, Uri uri) {
        InputStream is = null;
        try {
            BitmapFactory.Options options = new BitmapFactory.Options();
            // Decode only the header, not the bitmap itself.
            options.inJustDecodeBounds = true;
            is = resolver.openInputStream(uri);
            BitmapFactory.decodeStream(is, null, options);
            int width = options.outWidth;
            int height = options.outHeight;
            return new Point(width, height);
        } catch (FileNotFoundException e) {
            Log.e(TAG, "target file (" + uri + ") does not exist.", e);
            return null;
        } finally {
            // Always release the stream, even when decoding fails.
            CloseableUtils.close(is);
        }
    }
}
public class UISelectMany { /** * < p > Return < code > true < / code > if the new value is different from the
* previous value . Value comparison must not be sensitive to element order .
* @ param previous old value of this component
* @ param value new value of this component */
protected boolean compareValues ( Object previous , Object value ) { } } | if ( ( previous == null ) && ( value != null ) ) { return ( true ) ; } else if ( ( previous != null ) && ( value == null ) ) { return ( true ) ; } else if ( ( previous == null ) ) { return ( false ) ; } boolean valueChanged = false ; Object oldarray [ ] ; Object newarray [ ] ; // The arrays may be arrays of primitives ; for simplicity ,
// perform the boxing here .
if ( ! ( previous instanceof Object [ ] ) ) { previous = toObjectArray ( previous ) ; } if ( ! ( value instanceof Object [ ] ) ) { value = toObjectArray ( value ) ; } // If values are still not of the type Object [ ] , it is perhaps a
// mistake by the renderers , so return false , so that
// ValueChangedEvent is not queued in this case .
if ( ! ( previous instanceof Object [ ] ) || ! ( value instanceof Object [ ] ) ) { return false ; } oldarray = ( Object [ ] ) previous ; newarray = ( Object [ ] ) value ; // If we got here then both the arrays cannot be null
// if their lengths vary , return false .
if ( oldarray . length != newarray . length ) { return true ; } // make sure every element in the previous array occurs the same
// number of times in the current array . This should help us
// to find out the values changed are not . Since we cannot assume
// the browser will send the elements in the same order everytime ,
// it will not suffice to just compare the element position and position .
int count1 ; int count2 ; for ( int i = 0 ; i < oldarray . length ; ++ i ) { count1 = countElementOccurrence ( oldarray [ i ] , oldarray ) ; count2 = countElementOccurrence ( oldarray [ i ] , newarray ) ; if ( count1 != count2 ) { valueChanged = true ; break ; } } return valueChanged ; |
public class CommerceOrderItemPersistenceImpl {
    /**
     * Returns a range of all the commerce order items where commerceOrderId = &#63;.
     *
     * <p>
     * Useful when paginating results. Returns a maximum of <code>end - start</code> instances.
     * <code>start</code> and <code>end</code> are not primary keys, they are indexes in the
     * result set; thus, <code>0</code> refers to the first result in the set. Setting both
     * <code>start</code> and <code>end</code> to {@link QueryUtil#ALL_POS} will return the
     * full result set.
     * </p>
     *
     * @param commerceOrderId the commerce order ID
     * @param start the lower bound of the range of commerce order items
     * @param end the upper bound of the range of commerce order items (not inclusive)
     * @return the range of matching commerce order items
     */
    @Override
    public List<CommerceOrderItem> findByCommerceOrderId(long commerceOrderId, int start, int end) {
        // Delegate to the four-argument overload with no OrderByComparator,
        // which applies the entity's default ORDER BY when pagination is used.
        return findByCommerceOrderId(commerceOrderId, start, end, null);
    }
}
public class PagedInputBuffer { /** * Select buffer .
* @ param offset the offset */
private void selectPage ( long offset ) { } } | if ( currentBuffer == null || currentBuffer . seekSuccessful ( offset ) ) { currentBuffer = null ; int i = 0 ; // Look if the given offset is already stored in a page
while ( i < pages . size ( ) && currentBuffer == null ) { if ( pages . get ( i ) . seekSuccessful ( offset ) ) currentBuffer = pages . get ( i ) ; i ++ ; } if ( currentBuffer == null ) { // If the offset is not contained in any of the loaded pages , create a new one
// FIFO
if ( pages . size ( ) >= MaxPages ) pages . remove ( 0 ) ; pages . add ( new InputBuffer ( input ) ) ; currentBuffer = pages . get ( pages . size ( ) - 1 ) ; } } |
public class PrefHelper {
    /**
     * <p>Sets the value of the {@link String} key value supplied in preferences.</p>
     *
     * @param key A {@link String} value containing the key to reference.
     * @param value A {@link Boolean} value to set the preference record to.
     */
    public void setBool(String key, Boolean value) {
        prefHelper_.prefsEditor_.putBoolean(key, value);
        // apply() persists asynchronously; unlike commit() it never blocks the
        // calling thread.
        prefHelper_.prefsEditor_.apply();
    }
}
public class GRLINERGImpl {
    /**
     * Sets the XOSSF attribute, emitting an EMF SET notification (carrying the
     * old and new values) when any adapters are attached.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     * @param newXOSSF the new value of the XOSSF attribute
     */
    public void setXOSSF(Integer newXOSSF) {
        Integer oldXOSSF = xossf;
        xossf = newXOSSF;
        // Standard EMF pattern: only build/dispatch the notification when a
        // listener is actually registered.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.GRLINERG__XOSSF, oldXOSSF, xossf));
    }
}
public class NameTable { /** * Convert a Java type into the equivalent JNI type . */
public String getJniType ( TypeMirror type ) { } } | if ( TypeUtil . isPrimitiveOrVoid ( type ) ) { return getPrimitiveObjCType ( type ) ; } else if ( TypeUtil . isArray ( type ) ) { return "jarray" ; } else if ( typeUtil . isString ( type ) ) { return "jstring" ; } else if ( typeUtil . isClassType ( type ) ) { return "jclass" ; } return "jobject" ; |
public class DRL5Lexer {
    /**
     * $ANTLR start "PLUS_ASSIGN"
     * Matches the '+=' compound-assignment token. ANTLR-generated from
     * DRL5Lexer.g; the matching logic must not be hand-edited.
     */
    public final void mPLUS_ASSIGN() throws RecognitionException {
        try {
            int _type = PLUS_ASSIGN;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // src/main/resources/org/drools/compiler/lang/DRL5Lexer.g:156:5: ( '+=' )
            // src/main/resources/org/drools/compiler/lang/DRL5Lexer.g:156:7: '+='
            {
                match("+=");
                // In backtracking mode a failed match sets state.failed instead
                // of throwing; bail out without committing the token.
                if (state.failed) return;
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
            // do for sure before leaving
        }
    }
}
public class Quaternion { /** * Sets the value of this quaternion to the conjugate of quaternion q1.
* @ param q1 the source vector */
public final void conjugate ( Quaternion q1 ) { } } | this . x = - q1 . x ; this . y = - q1 . y ; this . z = - q1 . z ; this . w = q1 . w ; |
public class RuleBasedNumberFormat { /** * Set capitalizationForListOrMenu , capitalizationForStandAlone */
private void initCapitalizationContextInfo ( ULocale theLocale ) { } } | ICUResourceBundle rb = ( ICUResourceBundle ) UResourceBundle . getBundleInstance ( ICUData . ICU_BASE_NAME , theLocale ) ; try { ICUResourceBundle rdb = rb . getWithFallback ( "contextTransforms/number-spellout" ) ; int [ ] intVector = rdb . getIntVector ( ) ; if ( intVector . length >= 2 ) { capitalizationForListOrMenu = ( intVector [ 0 ] != 0 ) ; capitalizationForStandAlone = ( intVector [ 1 ] != 0 ) ; } } catch ( MissingResourceException e ) { // use default
} |
public class HibernateClient {
    /**
     * Persists the entity through a Hibernate stateless session: inserts on
     * first persist, updates otherwise, committing the transaction via the
     * onBegin/onCommit helpers.
     *
     * (non-Javadoc)
     * @see com.impetus.kundera.client.ClientBase#onPersist(com.impetus.kundera.metadata.model.EntityMetadata,
     *      java.lang.Object, java.lang.Object, java.util.List)
     */
    @Override
    protected void onPersist(EntityMetadata metadata, Object entity, Object id, List<RelationHolder> relationHolders) {
        // Strip Kundera proxies first; if any were removed, foreign keys must
        // be re-written even on the update path.
        boolean proxyRemoved = removeKunderaProxies(metadata, entity, relationHolders);
        Transaction tx = null;
        s = getStatelessSession();
        tx = onBegin();
        try {
            if (!isUpdate) {
                // Insert returns the (possibly generated) identifier.
                id = s.insert(entity);
                // Update foreign Keys
                updateForeignKeys(metadata, id, relationHolders);
                onCommit(tx); /* tx.commit(); */
            } else {
                s.update(entity);
                if (proxyRemoved) {
                    updateForeignKeys(metadata, id, relationHolders);
                }
                onCommit(tx);
            }
        }
        // TODO: Bad code, get rid of these exceptions, currently necessary for
        // handling many to one case
        // NOTE(review): a constraint violation on insert deliberately falls
        // back to update (upsert-style); confirm this is still required for
        // the many-to-one case before simplifying.
        catch (org.hibernate.exception.ConstraintViolationException e) {
            s.update(entity);
            log.info(e.getMessage());
            onCommit(tx); // tx.commit();
        } catch (HibernateException e) {
            log.error("Error while persisting object of {}, Caused by {}.", metadata.getEntityClazz(), e);
            throw new PersistenceException(e);
        }
    }
}
public class XmppClient { /** * Calculates the SHA1 Digest of a given input .
* @ param input the input
* @ return the string
* @ throws NoSuchAlgorithmException the no such algorithm exception */
@ SneakyThrows ( IOException . class ) public static String sha1 ( String input ) throws NoSuchAlgorithmException { } } | MessageDigest mDigest = MessageDigest . getInstance ( "SHA1" ) ; byte [ ] result = mDigest . digest ( input . getBytes ( "UTF-8" ) ) ; String resultString = String . format ( "%040x" , new BigInteger ( 1 , result ) ) ; return resultString ; |
public class Configuration { /** * Reload existing configuration instances . */
public static synchronized void reloadExistingConfigurations ( ) { } } | if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Reloading " + REGISTRY . keySet ( ) . size ( ) + " existing configurations" ) ; } for ( Configuration conf : REGISTRY . keySet ( ) ) { conf . reloadConfiguration ( ) ; } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.