signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ZonedDateTimeParameter { /** * Write data to socket in binary format . * @ param pos socket output stream * @ throws IOException if socket error occur */ public void writeBinary ( final PacketOutputStream pos ) throws IOException { } }
pos . write ( ( byte ) ( fractionalSeconds ? 11 : 7 ) ) ; // length pos . writeShort ( ( short ) tz . getYear ( ) ) ; pos . write ( ( byte ) ( ( tz . getMonth ( ) . getValue ( ) ) & 0xff ) ) ; pos . write ( ( byte ) ( tz . getDayOfMonth ( ) & 0xff ) ) ; pos . write ( ( byte ) tz . getHour ( ) ) ; pos . write ( ( byte ) tz . getMinute ( ) ) ; pos . write ( ( byte ) tz . getSecond ( ) ) ; if ( fractionalSeconds ) { pos . writeInt ( tz . getNano ( ) / 1000 ) ; }
public class CollectionManager { /** * Gets iterator for Number range from 0. * Range from 0 up to number or 0 down to number inclusive . * Empty Iterator if number is 0. * @ param obj the top range number value * @ return the bsh iterator */ public Iterator < Integer > getBshIterator ( final Number obj ) { } }
int number = obj . intValue ( ) ; if ( number == 0 ) return this . emptyIt ( ) ; if ( number > 0 ) return IntStream . rangeClosed ( 0 , number ) . iterator ( ) ; return IntStream . rangeClosed ( number , 0 ) . map ( i -> number - i ) . iterator ( ) ;
public class FileUtil { /** * Load bytes from the given url . * @ param url * Local of the file to load . * @ return Bytes loaded from the given url . */ public static byte [ ] loadBytes ( URL url ) { } }
byte [ ] bytes = null ; try { bytes = loadBytes ( url . openStream ( ) ) ; } catch ( IOException ex ) { // ex . printStackTrace ( ) ; } return bytes ;
public class QuorumJournalManager { /** * Get input stream to one of the nodes for given txid . */ @ Override public ImageInputStream getImageInputStream ( long txid ) throws IOException { } }
URLImageInputStream stream = loggers . getImageInputStream ( txid , httpConnectReadTimeoutMs ) ; if ( stream == null ) { throw new IOException ( "Cannot obtain input stream for image: " + txid ) ; } return new ImageInputStream ( txid , stream , stream . getImageDigest ( ) , stream . toString ( ) , stream . getSize ( ) ) ;
public class Choice3 { /** * Static factory method for wrapping a value of type < code > A < / code > in a { @ link Choice3 } . * @ param a the value * @ param < A > the first possible type * @ param < B > the second possible type * @ param < C > the third possible type * @ return the wrapped value as a { @ link Choice3 } & lt ; A , B , C & gt ; */ public static < A , B , C > Choice3 < A , B , C > a ( A a ) { } }
return new _A < > ( a ) ;
public class ZWaveController { /** * Checks for dead or sleeping nodes during Node initialization . * JwS : merged checkInitComplete and checkForDeadOrSleepingNodes to prevent possibly looping nodes multiple times . */ public void checkForDeadOrSleepingNodes ( ) { } }
int completeCount = 0 ; if ( zwaveNodes . isEmpty ( ) ) return ; // There are still nodes waiting to get a ping . // So skip the dead node checking . for ( SerialMessage serialMessage : sendQueue ) { if ( serialMessage . getPriority ( ) == SerialMessage . SerialMessagePriority . Low ) return ; } logger . trace ( "Checking for Dead or Sleeping Nodes." ) ; for ( Map . Entry < Integer , ZWaveNode > entry : zwaveNodes . entrySet ( ) ) { if ( entry . getValue ( ) . getNodeStage ( ) == ZWaveNode . NodeStage . NODEBUILDINFO_EMPTYNODE ) continue ; logger . debug ( String . format ( "Node %d has been in Stage %s since %s" , entry . getKey ( ) , entry . getValue ( ) . getNodeStage ( ) . getLabel ( ) , entry . getValue ( ) . getQueryStageTimeStamp ( ) . toString ( ) ) ) ; if ( entry . getValue ( ) . getNodeStage ( ) == ZWaveNode . NodeStage . NODEBUILDINFO_DONE || ! entry . getValue ( ) . isListening ( ) ) { completeCount ++ ; continue ; } logger . trace ( "Checking if {} miliseconds have passed in current stage." , QUERY_STAGE_TIMEOUT ) ; if ( Calendar . getInstance ( ) . getTimeInMillis ( ) < ( entry . getValue ( ) . getQueryStageTimeStamp ( ) . getTime ( ) + QUERY_STAGE_TIMEOUT ) ) continue ; logger . warn ( String . format ( "Node %d may be dead, setting stage to DEAD." , entry . getKey ( ) ) ) ; entry . getValue ( ) . setNodeStage ( ZWaveNode . NodeStage . NODEBUILDINFO_DEAD ) ; completeCount ++ ; } if ( this . zwaveNodes . size ( ) == completeCount ) { ZWaveEvent zEvent = new ZWaveEvent ( ZWaveEventType . NETWORK_EVENT , 1 , 0 , "INIT_DONE" ) ; this . notifyEventListeners ( zEvent ) ; }
public class CastScopeSession { /** * create a scope for cast operator . * @ param context the context . * @ param reference the reference to the internal feature . * @ param resolvedTypes the resolved types . * @ return the scope . */ protected IScope createCastOperatorScope ( EObject context , EReference reference , IResolvedTypes resolvedTypes ) { } }
if ( ! ( context instanceof SarlCastedExpression ) ) { return IScope . NULLSCOPE ; } final SarlCastedExpression call = ( SarlCastedExpression ) context ; final XExpression receiver = call . getTarget ( ) ; if ( receiver == null ) { return IScope . NULLSCOPE ; } return getFeatureScopes ( ) . createFeatureCallScopeForReceiver ( call , receiver , getParent ( ) , resolvedTypes ) ;
public class DateTimeFormatterBuilder { /** * Appends a string literal to the formatter . * This string will be output during a print . * If the literal is empty , nothing is added to the formatter . * @ param literal the literal to append , not null * @ return this , for chaining , not null */ public DateTimeFormatterBuilder appendLiteral ( String literal ) { } }
Jdk8Methods . requireNonNull ( literal , "literal" ) ; if ( literal . length ( ) > 0 ) { if ( literal . length ( ) == 1 ) { appendInternal ( new CharLiteralPrinterParser ( literal . charAt ( 0 ) ) ) ; } else { appendInternal ( new StringLiteralPrinterParser ( literal ) ) ; } } return this ;
public class PendingReplicationBlocks { /** * Returns a list of blocks that have timed out their * replication requests . Returns null if no blocks have * timed out . */ BlockInfo [ ] getTimedOutBlocks ( ) { } }
synchronized ( timedOutItems ) { if ( timedOutItems . size ( ) <= 0 ) { return null ; } BlockInfo [ ] blockList = timedOutItems . toArray ( new BlockInfo [ timedOutItems . size ( ) ] ) ; timedOutItems . clear ( ) ; return blockList ; }
public class Builder { /** * Returns a new { @ link GeoShape . Intersection } , representing the intersection of the specified WKT shapes . * @ param shapes the shapes to be intersected * @ return a new intersection transformation */ public static GeoShape . Intersection intersection ( String ... shapes ) { } }
return intersection ( Stream . of ( shapes ) . map ( Builder :: wkt ) . collect ( Collectors . toList ( ) ) ) ;
public class ServletContextImpl { /** * Gets the session with the specified ID if it exists * @ param sessionId The session ID * @ return The session */ public HttpSessionImpl getSession ( final String sessionId ) { } }
final SessionManager sessionManager = deployment . getSessionManager ( ) ; Session session = sessionManager . getSession ( sessionId ) ; if ( session != null ) { return SecurityActions . forSession ( session , this , false ) ; } return null ;
public class ApiOvhPrice { /** * Get price of zone options * REST : GET / price / domain / zone / option / { optionName } * @ param optionName [ required ] Option */ public OvhPrice domain_zone_option_optionName_GET ( net . minidev . ovh . api . price . domain . zone . OvhOptionEnum optionName ) throws IOException { } }
String qPath = "/price/domain/zone/option/{optionName}" ; StringBuilder sb = path ( qPath , optionName ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhPrice . class ) ;
public class TaskTracker { /** * Get the number of currently available slots on this tasktracker for the * given type of the task . * @ param taskType the { @ link TaskType } to check for number of available slots * @ return the number of currently available slots for the given * < code > taskType < / code > */ public int getAvailableSlots ( TaskType taskType ) { } }
int availableSlots = 0 ; if ( taskType == TaskType . MAP ) { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( trackerName + " getAvailSlots:" + " max(m)=" + status . getMaxMapSlots ( ) + " occupied(m)=" + status . countOccupiedMapSlots ( ) ) ; } availableSlots = status . getAvailableMapSlots ( ) ; } else { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( trackerName + " getAvailSlots:" + " max(r)=" + status . getMaxReduceSlots ( ) + " occupied(r)=" + status . countOccupiedReduceSlots ( ) ) ; } availableSlots = status . getAvailableReduceSlots ( ) ; } return availableSlots ;
public class LicenseConfigurationMarshaller { /** * Marshall the given parameter object . */ public void marshall ( LicenseConfiguration licenseConfiguration , ProtocolMarshaller protocolMarshaller ) { } }
if ( licenseConfiguration == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( licenseConfiguration . getLicenseConfigurationId ( ) , LICENSECONFIGURATIONID_BINDING ) ; protocolMarshaller . marshall ( licenseConfiguration . getLicenseConfigurationArn ( ) , LICENSECONFIGURATIONARN_BINDING ) ; protocolMarshaller . marshall ( licenseConfiguration . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( licenseConfiguration . getDescription ( ) , DESCRIPTION_BINDING ) ; protocolMarshaller . marshall ( licenseConfiguration . getLicenseCountingType ( ) , LICENSECOUNTINGTYPE_BINDING ) ; protocolMarshaller . marshall ( licenseConfiguration . getLicenseRules ( ) , LICENSERULES_BINDING ) ; protocolMarshaller . marshall ( licenseConfiguration . getLicenseCount ( ) , LICENSECOUNT_BINDING ) ; protocolMarshaller . marshall ( licenseConfiguration . getLicenseCountHardLimit ( ) , LICENSECOUNTHARDLIMIT_BINDING ) ; protocolMarshaller . marshall ( licenseConfiguration . getConsumedLicenses ( ) , CONSUMEDLICENSES_BINDING ) ; protocolMarshaller . marshall ( licenseConfiguration . getStatus ( ) , STATUS_BINDING ) ; protocolMarshaller . marshall ( licenseConfiguration . getOwnerAccountId ( ) , OWNERACCOUNTID_BINDING ) ; protocolMarshaller . marshall ( licenseConfiguration . getConsumedLicenseSummaryList ( ) , CONSUMEDLICENSESUMMARYLIST_BINDING ) ; protocolMarshaller . marshall ( licenseConfiguration . getManagedResourceSummaryList ( ) , MANAGEDRESOURCESUMMARYLIST_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class AkkaRpcActor { /** * Send throwable to sender if the sender is specified . * @ param throwable to send to the sender */ protected void sendErrorIfSender ( Throwable throwable ) { } }
if ( ! getSender ( ) . equals ( ActorRef . noSender ( ) ) ) { getSender ( ) . tell ( new Status . Failure ( throwable ) , getSelf ( ) ) ; }
public class HyperBoundingBox { /** * The object implements the writeExternal method to save its contents by * calling the methods of DataOutput for its primitive values or calling the * writeObject method of ObjectOutput for objects , strings , and arrays . * @ param out the stream to write the object to * @ throws java . io . IOException Includes any I / O exceptions that may occur * @ serialData Overriding methods should use this tag to describe the data * layout of this Externalizable object . List the sequence of * element types and , if possible , relate the element to a * public / protected field and / or method of this Externalizable * class . */ @ Override public void writeExternal ( ObjectOutput out ) throws IOException { } }
int dim = getDimensionality ( ) ; out . writeInt ( dim ) ; for ( double aMin : min ) { out . writeDouble ( aMin ) ; } for ( double aMax : max ) { out . writeDouble ( aMax ) ; }
public class CommerceSubscriptionEntryPersistenceImpl { /** * Removes all the commerce subscription entries where groupId = & # 63 ; and userId = & # 63 ; from the database . * @ param groupId the group ID * @ param userId the user ID */ @ Override public void removeByG_U ( long groupId , long userId ) { } }
for ( CommerceSubscriptionEntry commerceSubscriptionEntry : findByG_U ( groupId , userId , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ) { remove ( commerceSubscriptionEntry ) ; }
public class GreenPepperServerConfigurationActivator { /** * < p > startup . < / p > * @ param forceStartup a boolean . * @ throws com . greenpepper . server . GreenPepperServerException if any . */ public void startup ( boolean forceStartup ) throws GreenPepperServerException { } }
log . info ( "Starting Plugin" ) ; if ( ! isPluginEnabled ) return ; final GreenPepperServerConfiguration configuration = getConfiguration ( ) ; if ( ( configuration . isSetupComplete ( ) && ! isServerStarted ) || forceStartup ) { isServerStarted = false ; try { closeSession ( ) ; Properties properties = configuration . getProperties ( ) ; injectAdditionalProperties ( properties ) ; HibernateSessionService sessionService = new HibernateSessionService ( properties ) ; log . info ( "Boostrapping datas" ) ; new BootstrapData ( sessionService , properties ) . execute ( ) ; new GreenPepperUserGroup ( ) . createIfNeeded ( ) ; Authorizer authorizer = new OpenSourceAuthorizer ( sessionService , properties ) ; authorizer . initialize ( GreenPepperServer . versionDate ( ) ) ; ProjectDao projectDao = new HibernateProjectDao ( sessionService ) ; RepositoryDao repositoryDao = new HibernateRepositoryDao ( sessionService ) ; SystemUnderTestDao sutDao = new HibernateSystemUnderTestDao ( sessionService ) ; DocumentDao documentDao = new HibernateDocumentDao ( sessionService ) ; Object object = ContainerManager . getComponent ( "greenPepperServerService" ) ; GreenPepperServerServiceImpl service = ( GreenPepperServerServiceImpl ) object ; service . setAuthorizer ( authorizer ) ; service . setDocumentDao ( documentDao ) ; service . setProjectDao ( projectDao ) ; service . setRepositoryDao ( repositoryDao ) ; service . setSessionService ( sessionService ) ; service . setSutDao ( sutDao ) ; object = ContainerManager . getComponent ( "greenPepperXmlRpcServerService" ) ; GreenPepperXmlRpcServer xmlRpcServer = ( GreenPepperXmlRpcServer ) object ; xmlRpcServer . setService ( service ) ; hibernateSessionService = sessionService ; configuration . setSetupComplete ( true ) ; storeConfiguration ( configuration ) ; isServerStarted = true ; } catch ( Exception ex ) { log . error ( "Starting up GreenPepper plugin" , ex ) ; throw new GreenPepperServerException ( GreenPepperServerErrorKey . 
GENERAL_ERROR , ex ) ; } }
public class WebAppServlet { /** * Add a init param for filter . * @ param param to be added * @ throws NullArgumentException if param , param name , param value is null */ public void addInitParam ( final WebAppInitParam param ) { } }
NullArgumentException . validateNotNull ( param , "Init param" ) ; NullArgumentException . validateNotNull ( param . getParamName ( ) , "Init param name" ) ; NullArgumentException . validateNotNull ( param . getParamValue ( ) , "Init param value" ) ; initParams . add ( param ) ;
public class Functions { /** * Runs random string function with arguments . * @ param numberOfLetters * @ param notationMethod * @ param useNumbers * @ return */ public static String randomString ( Long numberOfLetters , String notationMethod , boolean useNumbers , TestContext context ) { } }
return new RandomStringFunction ( ) . execute ( Arrays . asList ( String . valueOf ( numberOfLetters ) , notationMethod , String . valueOf ( useNumbers ) ) , context ) ;
public class UpdateTypedLinkFacetRequest { /** * The order of identity attributes for the facet , from most significant to least significant . The ability to filter * typed links considers the order that the attributes are defined on the typed link facet . When providing ranges to * a typed link selection , any inexact ranges must be specified at the end . Any attributes that do not have a range * specified are presumed to match the entire range . Filters are interpreted in the order of the attributes on the * typed link facet , not the order in which they are supplied to any API calls . For more information about identity * attributes , see < a href = * " https : / / docs . aws . amazon . com / clouddirectory / latest / developerguide / directory _ objects _ links . html # directory _ objects _ links _ typedlink " * > Typed Links < / a > . * @ param identityAttributeOrder * The order of identity attributes for the facet , from most significant to least significant . The ability to * filter typed links considers the order that the attributes are defined on the typed link facet . When * providing ranges to a typed link selection , any inexact ranges must be specified at the end . Any * attributes that do not have a range specified are presumed to match the entire range . Filters are * interpreted in the order of the attributes on the typed link facet , not the order in which they are * supplied to any API calls . For more information about identity attributes , see < a href = * " https : / / docs . aws . amazon . com / clouddirectory / latest / developerguide / directory _ objects _ links . html # directory _ objects _ links _ typedlink " * > Typed Links < / a > . */ public void setIdentityAttributeOrder ( java . util . Collection < String > identityAttributeOrder ) { } }
if ( identityAttributeOrder == null ) { this . identityAttributeOrder = null ; return ; } this . identityAttributeOrder = new java . util . ArrayList < String > ( identityAttributeOrder ) ;
public class ConfigImpl { /** * Resolves has ! loader plugin expressions by calling _ resolve ( ) on each of the modules named * in the expression . Optionally evaluates the feature conditionals in the expression , * potentially simplifying or eliminating the conditionals . * @ param expression * The expression to resolve , excluding the has plugin name * ( e . g . feature ? moduleA : moduleB ) * @ param features * Features that are defined in the request * @ param dependentFeatures * Output - Set of feature names that the returned value is * conditioned on . Used for cache management . * @ param resolveAliases * If true , then module name aliases will be resolved * @ param evaluateHasPluginConditionals * If true , then attempt to evaluate the has plugin conditionals using the features * provided in < code > features < / code > , potentially eliminating the has ! loader plugin * from the returned value if all features can be resolved . If false , then the * conditionals are retained in the result , although the expression may change * if new conditionals are introduced by alias resolution . * @ param sb * If not null , then a reference to a string buffer that can * be used by the resolver to indicate debug / diagnostic information * about the alias resolution . For example , the resolver may * indicate that alias resolution was not performed due to * a missing required feature . * @ param recursionCount * Counter used to guard against runaway recursion * @ return The module id with has ! loader plugin resolved , or { @ code mid } if the * features specified by the loader plugin are not defined . */ protected String resolveHasPlugin ( String expression , Features features , Set < String > dependentFeatures , boolean resolveAliases , boolean evaluateHasPluginConditionals , int recursionCount , StringBuffer sb ) { } }
final String sourceMethod = "resolveHasPlugin" ; // $ NON - NLS - 1 $ boolean isTraceLogging = log . isLoggable ( Level . FINER ) ; if ( isTraceLogging ) { log . entering ( ConfigImpl . class . getName ( ) , sourceMethod , new Object [ ] { expression , features , dependentFeatures , resolveAliases , evaluateHasPluginConditionals , recursionCount , sb } ) ; } HasNode hasNode = new HasNode ( expression ) . resolve ( evaluateHasPluginConditionals ? features : Features . emptyFeatures , dependentFeatures , isCoerceUndefinedToFalse ( ) ) ; Collection < HasNode > nodes = new ArrayList < HasNode > ( ) ; Set < String > depFeatures = new HashSet < String > ( ) ; hasNode . gatherEndpoints ( nodes ) ; for ( HasNode node : nodes ) { String replacement = _resolve ( node . getNodeName ( ) , features , depFeatures , resolveAliases , evaluateHasPluginConditionals , recursionCount , sb ) ; dependentFeatures . addAll ( depFeatures ) ; // If a has ! loader plugin expressions was introduced by alias resolution , then // create a new HasNode for the expression and replace the current node with the new // node . Otherwise , replace the current node with the new module id . int idx = replacement . indexOf ( "!" ) ; // $ NON - NLS - 1 $ if ( idx != - 1 && HAS_PATTERN . matcher ( replacement . substring ( 0 , idx ) ) . find ( ) ) { node . replaceWith ( new HasNode ( replacement . substring ( idx + 1 ) ) ) ; } else { node . replaceWith ( replacement ) ; } } if ( sb != null && evaluateHasPluginConditionals ) { Map < String , Boolean > featureMap = new HashMap < String , Boolean > ( ) ; if ( ! expression . equals ( hasNode . toString ( ) ) ) { for ( String featureName : depFeatures ) { if ( features . contains ( featureName ) || isCoerceUndefinedToFalse ( ) ) { featureMap . put ( featureName , features . isFeature ( featureName ) ) ; } } sb . append ( ", " ) . append ( MessageFormat . format ( // $ NON - NLS - 1 $ Messages . ConfigImpl_2 , new Object [ ] { hasNode . 
toString ( ) + featureMap . toString ( ) } ) ) ; } if ( isCoerceUndefinedToFalse ( ) ) { // determine the missing feature . Should be the only one left in depFeatures // after removing the request features . depFeatures . removeAll ( features . featureNames ( ) ) ; if ( ! depFeatures . isEmpty ( ) ) { sb . append ( ", " ) . append ( MessageFormat . format ( // $ NON - NLS - 1 $ Messages . ConfigImpl_4 , new Object [ ] { depFeatures . toString ( ) } ) ) ; } } } if ( isTraceLogging ) { log . exiting ( ConfigImpl . class . getName ( ) , sourceMethod , hasNode . toString ( ) ) ; } return hasNode . toString ( ) ;
public class ClassByExtensionBenchmark { /** * Performs a benchmark of a class extension using Byte Buddy . This benchmark also uses the annotation - based approach * but creates delegation methods which do not require the creation of additional classes . This benchmark uses a type * pool to compare against usage of the reflection API . * @ return The created instance , in order to avoid JIT removal . * @ throws Exception If the invocation causes an exception . */ @ Benchmark public ExampleClass benchmarkByteBuddyWithAccessorWithTypePool ( ) throws Exception { } }
return ( ExampleClass ) new ByteBuddy ( ) . with ( TypeValidation . DISABLED ) . ignore ( none ( ) ) . subclass ( baseClassDescription ) . method ( isDeclaredBy ( baseClassDescription ) ) . intercept ( MethodDelegation . to ( accessClassDescription ) ) . make ( ) . load ( newClassLoader ( ) , ClassLoadingStrategy . Default . INJECTION ) . getLoaded ( ) . getDeclaredConstructor ( ) . newInstance ( ) ;
public class InjectionProcessor { /** * Add the InjectionBinding to the annotationCollection . The collection will be used * later when binding and resolving injection targets . * @ param injectionBinding */ public final void addInjectionBinding ( InjectionBinding < A > injectionBinding ) { } }
final boolean isTraceOn = TraceComponent . isAnyTracingEnabled ( ) ; if ( isTraceOn && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "addInjectionBinding: " + injectionBinding ) ; // jndi name not found in collection , simple add ivAllAnnotationsCollection . put ( injectionBinding . getJndiName ( ) , injectionBinding ) ;
public class MtasDataLongAdvanced { /** * ( non - Javadoc ) * @ see * mtas . codec . util . DataCollector . MtasDataCollector # minimumForComputingSegment ( * java . lang . Number , java . lang . Number ) */ @ Override protected Long lastForComputingSegment ( Long value , Long boundary ) throws IOException { } }
if ( segmentRegistration . equals ( SEGMENT_SORT_ASC ) || segmentRegistration . equals ( SEGMENT_BOUNDARY_ASC ) ) { return Math . max ( value , boundary ) ; } else if ( segmentRegistration . equals ( SEGMENT_SORT_DESC ) || segmentRegistration . equals ( SEGMENT_BOUNDARY_DESC ) ) { return Math . min ( value , boundary ) ; } else { throw new IOException ( "can't compute last for segmentRegistration " + segmentRegistration ) ; }
public class DEBBuilder { /** * Add debian / control Provides field . * @ param name * @ param version Not used * @ param dependency Not used * @ return */ @ Override public DEBBuilder addProvide ( String name , String version , Condition ... dependency ) { } }
control . addProvides ( name ) ; return this ;
public class ConfigurationFile { /** * Finds a single file in the snapshots directory whose name starts with { @ code prefix } and * returns its name with the prefix removed . * @ param prefix the prefix * @ return the single file that meets the criterion { @ code null } if none do * @ throws IllegalStateException if more than one file meets the criteria */ private String findMainFileFromSnapshotPrefix ( final String prefix ) { } }
File [ ] files = null ; if ( snapshotsDirectory . exists ( ) && snapshotsDirectory . isDirectory ( ) ) { files = snapshotsDirectory . listFiles ( new FilenameFilter ( ) { @ Override public boolean accept ( File dir , String name ) { return name . startsWith ( prefix ) && SNAPSHOT_XML . matcher ( name ) . find ( ) ; } } ) ; } if ( files == null || files . length == 0 ) { return null ; } else if ( files . length > 1 ) { throw ControllerLogger . ROOT_LOGGER . ambiguousConfigurationFiles ( prefix , snapshotsDirectory , prefix ) ; } String matchName = files [ 0 ] . getName ( ) ; return matchName . substring ( TIMESTAMP_FORMAT . length ( ) ) ;
public class AlipayService { /** * MD5验证 */ public Boolean notifyVerifyMd5 ( Map < String , String > params ) { } }
return alipay . verify ( ) . md5 ( params ) ;
public class ComponentExposedTypeGenerator { /** * Create and return the builder for the Proto of our { @ link IsVueComponent } . * @ return A Builder to build the Proto class */ private Builder createProtoClassBuilder ( ) { } }
componentExposedTypeBuilder . addField ( FieldSpec . builder ( ClassName . bestGuess ( "Proto" ) , "__proto__" , Modifier . PUBLIC ) . addAnnotation ( JsProperty . class ) . build ( ) ) ; return TypeSpec . classBuilder ( "Proto" ) . addSuperinterface ( ParameterizedTypeName . get ( JsPropertyMap . class , Object . class ) ) . addModifiers ( Modifier . STATIC ) . addModifiers ( Modifier . PRIVATE ) . addAnnotation ( AnnotationSpec . builder ( JsType . class ) . addMember ( "isNative" , "$L" , true ) . addMember ( "namespace" , "$T.GLOBAL" , JsPackage . class ) . addMember ( "name" , "$S" , "Object" ) . build ( ) ) ;
public class KarafDistributionOption { /** * This option allows to extend configurations in each configuration file based on the * karaf . home location . The value extends the current value ( e . g . a = b to a = a , b ) instead of * replacing it . If there is no current value it is added . * If you would like to have add or replace functionality please use the * { @ link KarafDistributionConfigurationFilePutOption } instead . * @ param configurationFilePath * configuration file path * @ param key * property key * @ param value * property value * @ return option */ public static Option editConfigurationFileExtend ( String configurationFilePath , String key , Object value ) { } }
return new KarafDistributionConfigurationFileExtendOption ( configurationFilePath , key , value ) ;
public class Stream { /** * Put the stream in try - catch to stop the back - end reading thread if error happens * < br / > * < code > * try ( Stream < Integer > stream = Stream . parallelZip ( a , b , zipFunction ) ) { * stream . forEach ( N : : println ) ; * < / code > * @ param a * @ param b * @ param c * @ param valueForNoneA * @ param valueForNoneB * @ param valueForNoneC * @ param zipFunction * @ return */ public static < A , B , C , R > Stream < R > parallelZip ( final Stream < A > a , final Stream < B > b , final Stream < C > c , final A valueForNoneA , final B valueForNoneB , final C valueForNoneC , final TriFunction < ? super A , ? super B , ? super C , R > zipFunction ) { } }
return parallelZip ( a , b , c , valueForNoneA , valueForNoneB , valueForNoneC , zipFunction , DEFAULT_QUEUE_SIZE_PER_ITERATOR ) ;
public class ModelConversion { /** * / * ( non - Javadoc ) * @ see eu . atos . sla . util . IModelConversion # getAgreementFromAgreementXML ( eu . atos . sla . datamodel . parser . xml . agreement . Agreement , java . lang . String ) */ @ Override public IAgreement getAgreementFromAgreementXML ( eu . atos . sla . parser . data . wsag . Agreement agreementXML , String payload ) throws ModelConversionException { } }
IAgreement agreement = new Agreement ( ) ; // AgreementId if ( agreementXML . getAgreementId ( ) != null ) { agreement . setAgreementId ( agreementXML . getAgreementId ( ) ) ; } // Context Context context = agreementXML . getContext ( ) ; try { ServiceProvider ctxProvider = ServiceProvider . fromString ( context . getServiceProvider ( ) ) ; switch ( ctxProvider ) { case AGREEMENT_RESPONDER : setProviderAndConsumer ( agreement , context . getAgreementResponder ( ) , context . getAgreementInitiator ( ) ) ; break ; case AGREEMENT_INITIATOR : setProviderAndConsumer ( agreement , context . getAgreementInitiator ( ) , context . getAgreementResponder ( ) ) ; break ; } } catch ( IllegalArgumentException e ) { throw new ModelConversionException ( "The Context/ServiceProvider field must match with the word " + ServiceProvider . AGREEMENT_RESPONDER + " or " + ServiceProvider . AGREEMENT_INITIATOR ) ; } if ( context . getTemplateId ( ) != null ) { eu . atos . sla . datamodel . bean . Template template = new eu . atos . sla . datamodel . bean . Template ( ) ; template . setUuid ( context . getTemplateId ( ) ) ; agreement . setTemplate ( template ) ; } if ( context . getService ( ) != null ) { agreement . setServiceId ( context . getService ( ) ) ; } else { throw new ModelConversionException ( "Service is null, field must be informed" ) ; } if ( context . getExpirationTime ( ) != null ) { agreement . setExpirationDate ( context . getExpirationTime ( ) ) ; } // ServiceProperties List < IServiceProperties > servicePropertiesList = new ArrayList < IServiceProperties > ( ) ; List < ServiceProperties > servicePropertiesListXML = agreementXML . getTerms ( ) . getAllTerms ( ) . getServiceProperties ( ) ; if ( servicePropertiesListXML == null ) { servicePropertiesListXML = Collections . < ServiceProperties > emptyList ( ) ; } for ( ServiceProperties servicePropertiesXML : servicePropertiesListXML ) { IServiceProperties serviceProperties = new eu . atos . sla . datamodel . bean . 
ServiceProperties ( ) ; if ( servicePropertiesXML . getName ( ) != null ) { serviceProperties . setName ( servicePropertiesXML . getName ( ) ) ; } if ( servicePropertiesXML . getServiceName ( ) != null ) { serviceProperties . setServiceName ( servicePropertiesXML . getServiceName ( ) ) ; } if ( servicePropertiesXML != null ) { serviceProperties . setServiceName ( servicePropertiesXML . getServiceName ( ) ) ; } // VariableSet if ( servicePropertiesXML . getVariableSet ( ) != null ) { List < IVariable > variables = new ArrayList < IVariable > ( ) ; List < Variable > variablesXML = servicePropertiesXML . getVariableSet ( ) . getVariables ( ) ; if ( variablesXML != null ) { for ( Variable variableXML : variablesXML ) { IVariable variable = new eu . atos . sla . datamodel . bean . Variable ( ) ; logger . debug ( "Variable with name:{} - location:{} - metric:{}" , variableXML . getName ( ) , variableXML . getLocation ( ) , variableXML . getMetric ( ) ) ; if ( variableXML . getLocation ( ) != null ) { variable . setLocation ( variableXML . getLocation ( ) ) ; } if ( variableXML . getMetric ( ) != null ) { variable . setMetric ( variableXML . getMetric ( ) ) ; } if ( variableXML . getName ( ) != null ) { variable . setName ( variableXML . getName ( ) ) ; } variables . add ( variable ) ; } serviceProperties . setVariableSet ( variables ) ; } } servicePropertiesList . add ( serviceProperties ) ; } agreement . setServiceProperties ( servicePropertiesList ) ; agreement . setName ( agreementXML . getName ( ) ) ; // GuaranteeTerms List < IGuaranteeTerm > guaranteeTerms = new ArrayList < IGuaranteeTerm > ( ) ; List < GuaranteeTerm > guaranteeTermsXML = agreementXML . getTerms ( ) . getAllTerms ( ) . getGuaranteeTerms ( ) ; if ( guaranteeTermsXML == null ) { guaranteeTermsXML = Collections . < GuaranteeTerm > emptyList ( ) ; } for ( GuaranteeTerm guaranteeTermXML : guaranteeTermsXML ) { IGuaranteeTerm guaranteeTerm = new eu . atos . sla . datamodel . bean . 
GuaranteeTerm ( ) ; if ( guaranteeTermXML . getName ( ) != null ) { guaranteeTerm . setName ( guaranteeTermXML . getName ( ) ) ; } ServiceScope scope = guaranteeTermXML . getServiceScope ( ) ; if ( scope != null ) { logger . debug ( "guaranteeTerm with name:{} - servicescopeName:{} - servicescopeValue:{}" , guaranteeTermXML . getName ( ) , scope . getServiceName ( ) , scope . getValue ( ) ) ; guaranteeTerm . setServiceScope ( scope . getValue ( ) ) ; guaranteeTerm . setServiceName ( scope . getServiceName ( ) ) ; } else logger . debug ( "guaranteeTerm with name:{} - serviceScope is null" , guaranteeTermXML . getName ( ) ) ; // qualifying condition if ( guaranteeTermXML . getQualifyingCondition ( ) != null ) { logger . debug ( "qualifying condition informed with:{}" , guaranteeTermXML . getQualifyingCondition ( ) ) ; String qc = guaranteeTermXML . getQualifyingCondition ( ) ; if ( qc != null ) { QualifyingConditionParser . Result parsedQc = QualifyingConditionParser . parse ( qc ) ; guaranteeTerm . setSamplingPeriodFactor ( parsedQc . getSamplingPeriodFactor ( ) ) ; if ( parsedQc . getSamplingPeriodFactor ( ) == IGuaranteeTerm . ENFORCED_AT_END ) { agreement . setHasGTermToBeEvaluatedAtEndOfEnformcement ( true ) ; } } } /* * Parse SLO and BusinessValues */ ServiceLevelObjective slo = guaranteeTermXML . getServiceLevelObjetive ( ) ; if ( slo . getKpitarget ( ) != null ) { if ( slo . getKpitarget ( ) . getKpiName ( ) != null ) { guaranteeTerm . setKpiName ( slo . getKpitarget ( ) . getKpiName ( ) ) ; String csl = slo . getKpitarget ( ) . getCustomServiceLevel ( ) ; logger . debug ( "guaranteeTerm with kpiname:{} -- getCustomServiceLevel: " , slo . getKpitarget ( ) . getKpiName ( ) , csl ) ; if ( csl != null ) { logger . debug ( "CustomServiceLevel not null" ) ; ServiceLevelParser . Result parsedSlo = ServiceLevelParser . parse ( csl ) ; guaranteeTerm . setServiceLevel ( parsedSlo . getConstraint ( ) ) ; } else { logger . 
debug ( "CustomServiceLevel is null" ) ; } } } guaranteeTerm . setBusinessValueList ( businessValueListParser . parse ( guaranteeTermXML ) ) ; guaranteeTerms . add ( guaranteeTerm ) ; } agreement . setGuaranteeTerms ( guaranteeTerms ) ; // Text agreement . setText ( payload ) ; return agreement ;
public class ClientImpl { /** * Join the given channel . Create a new channel object for this connection , * join the channel and return the IRCChannel object . * @ param channelName The channel to join * @ return the new IRCChannel object or null if the channel could not be * joined */ @ Override public Channel join ( String channelName ) { } }
if ( ! connected ) { throw new NotConnectedException ( ) ; } ChannelImpl channel = new ChannelImpl ( this , channelName ) ; /* Attempt to join */ if ( channel . join ( ) ) { return channel ; } /* Error while joining */ return null ;
public class FacesBackingBean { /** * Remove this instance from the session . */ public void removeFromSession ( HttpServletRequest request ) { } }
StorageHandler sh = Handlers . get ( getServletContext ( ) ) . getStorageHandler ( ) ; HttpServletRequest unwrappedRequest = PageFlowUtils . unwrapMultipart ( request ) ; RequestContext rc = new RequestContext ( unwrappedRequest , null ) ; String attrName = ScopedServletUtils . getScopedSessionAttrName ( InternalConstants . FACES_BACKING_ATTR , unwrappedRequest ) ; sh . removeAttribute ( rc , attrName ) ;
public class AlignedBox3f { /** * Replies the center point . * @ return the center point . */ @ Pure @ Override public Point3f getCenter ( ) { } }
return new Point3f ( ( this . minx + this . maxx ) / 2. , ( this . miny + this . maxy ) / 2. , ( this . minz + this . maxz ) / 2. ) ;
public class XMLDatabase { /** * Notifies the registered listeners of a change in value node ( value or status ) . * @ param valueNode the value node that has changed */ @ Override public void fireValueNodeChanged ( IValueNode valueNode ) { } }
for ( IDatabaseListener iDatabaseListener : listeners ) { iDatabaseListener . valueNodeChanged ( valueNode ) ; }
public class FacesScaffoldProvider { /** * Writes the entity Metawidget and its namespaces into the given context . */ protected void writeEntityMetawidget ( final Map < Object , Object > context , final int entityMetawidgetIndent , final Map < String , String > existingNamespaces ) { } }
StringWriter stringWriter = new StringWriter ( ) ; this . entityMetawidget . write ( stringWriter , entityMetawidgetIndent ) ; context . put ( "metawidget" , stringWriter . toString ( ) . trim ( ) ) ; Map < String , String > namespaces = this . entityMetawidget . getNamespaces ( ) ; namespaces . keySet ( ) . removeAll ( existingNamespaces . keySet ( ) ) ; context . put ( "metawidgetNamespaces" , namespacesToString ( namespaces ) ) ;
public class StatsServlet {
    /**
     * Renders the executor-statistics page: lists all active executors and, for the
     * first one, the names of all metrics it exposes. Any failure is surfaced as an
     * error message on the page rather than propagated.
     *
     * @throws ExecutorManagerException declared but handled internally (caught as Exception below)
     */
    private void handleStatePageLoad(final HttpServletRequest req, final HttpServletResponse resp,
            final Session session) throws ServletException {
        final Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/statsPage.vm");
        try {
            final Collection<Executor> executors = this.execManagerAdapter.getAllActiveExecutors();
            page.add("executorList", executors);
            // No executors means there is nothing to query stats from; the throw
            // routes to the catch block below so an error message is rendered.
            if (executors.isEmpty()) {
                throw new ExecutorManagerException("Executor list is empty.");
            }
            // Ask the first active executor for the list of all known metric names.
            final Map<String, Object> result = this.execManagerAdapter.callExecutorStats(
                    executors.iterator().next().getId(), ConnectorParams.STATS_GET_ALLMETRICSNAME,
                    (Pair<String, String>[]) null);
            if (result.containsKey(ConnectorParams.RESPONSE_ERROR)) {
                page.add("errorMsg", result.get(ConnectorParams.RESPONSE_ERROR).toString());
            } else {
                page.add("metricList", result.get("data"));
            }
        } catch (final Exception e) {
            // Best-effort page: log and show a generic error instead of failing the request.
            logger.error(e.getMessage(), e);
            page.add("errorMsg", "Failed to get a response from Azkaban exec server");
        }
        page.render();
    }
}
public class vrid6_binding { /** * Use this API to fetch vrid6 _ binding resource of given name . */ public static vrid6_binding get ( nitro_service service , Long id ) throws Exception { } }
vrid6_binding obj = new vrid6_binding ( ) ; obj . set_id ( id ) ; vrid6_binding response = ( vrid6_binding ) obj . get_resource ( service ) ; return response ;
public class Introspected { /** * Get the delimited column name for the specified property name , or { @ code null } if * no such property exists . * CLARIFY Must be public ? * @ return the delimited column name or { @ code null } */ public String getColumnNameForProperty ( final String propertyName ) { } }
return Optional . ofNullable ( propertyToField . get ( propertyName ) ) . map ( fcInfo -> fcInfo . getDelimitedColumnName ( ) ) . orElse ( null ) ;
public class DeleteMarkerReplication { /** * Sets the replication status for delete markers . Delete markers are not replicated if status is Disabled . * @ param status New replication status . * @ return This object for method chaining . */ public DeleteMarkerReplication withStatus ( DeleteMarkerReplicationStatus status ) { } }
setStatus ( status == null ? null : status . toString ( ) ) ; return this ;
public class AntXmlReport { private List < PropertyModel > buildModel ( Map < String , String > properties ) { } }
List < PropertyModel > props = new ArrayList < > ( ) ; for ( Map . Entry < String , String > e : properties . entrySet ( ) ) { props . add ( new PropertyModel ( e . getKey ( ) , e . getValue ( ) ) ) ; } return props ;
public class ParserME {
    /**
     * Creates an n-gram dictionary from the specified data stream using the specified
     * head rules and cut-off.
     *
     * @param data The data stream of parses.
     * @param rules The head rules for the parses.
     * @param cutoff The minimum number of entries required for the n-gram to be saved
     *               as part of the dictionary.
     * @return A dictionary object.
     */
    private static MutableDictionary buildDictionary(DataStream data, HeadRules rules, int cutoff) {
        MutableDictionary mdict = new MutableDictionary(cutoff);
        while (data.hasNext()) {
            String parseStr = (String) data.nextToken();
            Parse p = Parse.parseParse(parseStr);
            p.updateHeads(rules);
            Parse[] pwords = p.getTagNodes();
            String[] words = new String[pwords.length];
            // add all uni-grams over the tag nodes of the parse
            for (int wi = 0; wi < words.length; wi++) {
                words[wi] = pwords[wi].toString();
            }
            mdict.add(words, 1, true);
            // add tri-grams and bi-grams for the initial chunk sequence
            Parse[] chunks = collapsePunctuation(ParserEventStream.getInitialChunks(p), rules.getPunctuationTags());
            String[] cwords = new String[chunks.length];
            for (int wi = 0; wi < cwords.length; wi++) {
                cwords[wi] = chunks[wi].getHead().toString();
            }
            mdict.add(cwords, 3, false);
            // emulate reductions to produce additional n-grams
            int ci = 0;
            while (ci < chunks.length) {
                // System.err.println("chunks["+ci+"]="+chunks[ci].getHead().toString()+" chunks.length="+chunks.length);
                if (lastChild(chunks[ci], chunks[ci].getParent(), rules.getPunctuationTags())) {
                    // perform reduce: find the first sibling sharing this chunk's parent
                    int reduceStart = ci;
                    while (reduceStart >= 0 && chunks[reduceStart].getParent() == chunks[ci].getParent()) {
                        reduceStart--;
                    }
                    reduceStart++;
                    // replace the sibling run with its parent constituent
                    chunks = ParserEventStream.reduceChunks(chunks, ci, chunks[ci].getParent());
                    ci = reduceStart;
                    if (chunks.length != 0) {
                        // collect a window of up to 5 heads centered on the reduced chunk
                        String[] window = new String[5];
                        int wi = 0;
                        if (ci - 2 >= 0) window[wi++] = chunks[ci - 2].getHead().toString();
                        if (ci - 1 >= 0) window[wi++] = chunks[ci - 1].getHead().toString();
                        window[wi++] = chunks[ci].getHead().toString();
                        if (ci + 1 < chunks.length) window[wi++] = chunks[ci + 1].getHead().toString();
                        if (ci + 2 < chunks.length) window[wi++] = chunks[ci + 2].getHead().toString();
                        // trim the window when fewer than 5 heads were available
                        if (wi < 5) {
                            String[] subWindow = new String[wi];
                            for (int swi = 0; swi < wi; swi++) {
                                subWindow[swi] = window[swi];
                            }
                            window = subWindow;
                        }
                        if (window.length >= 3) {
                            mdict.add(window, 3, false);
                        } else if (window.length == 2) {
                            mdict.add(window, 2, false);
                        }
                    }
                    ci = reduceStart - 1; // ci will be incremented at end of loop
                }
                ci++;
            }
        }
        return mdict;
    }
}
public class GlobalObjectPool { /** * 获取对象 * @ param id 对象的id , 可以是任何全局唯一的标示符 * @ param < T > 对象类型 * @ return 对象 */ public synchronized static < T > T get ( Object id ) { } }
SoftReference reference = pool . get ( id ) ; if ( reference == null ) return null ; return ( T ) reference . get ( ) ;
public class LegacySpy {
    /**
     * Alias for {@link #expectBetween(int, int, Threads, Query)} with arguments
     * {@code allowedStatements}, {@code allowedStatements}, {@code threads}, {@link Query#ANY}.
     *
     * @since 2.0
     */
    @Deprecated
    public C expect(int allowedStatements, Threads threadMatcher) {
        // Delegates to the fluent SqlQueries API: exactly `allowedStatements`
        // queries, restricted to the given thread matcher.
        return expect(SqlQueries.exactQueries(allowedStatements).threads(threadMatcher));
    }
}
public class GridRecordMessageFilter {
    /**
     * Update this filter with this new information (e.g. a new bookmark property / objectID).
     * Remember to call super after updating this filter, as that updates the remote copy
     * of this filter.
     *
     * @param mxProperties the existing name/value tree to extend
     * @param properties New filter information (ie, bookmark=345).
     * @return the resulting name/value tree
     */
    public Object[][] createNameValueTree(Object[][] mxProperties, Map<String, Object> properties) {
        // This subclass adds no extra name/value pairs; delegate entirely to the superclass.
        return super.createNameValueTree(mxProperties, properties);
    }
}
public class WbEditingAction {
    /**
     * Executes the API action "wbeditentity" for the given parameters. Created or
     * modified items are returned as a result; in particular, this is relevant to
     * find out about the id assigned to a newly created entity.
     *
     * Unless the parameter clear is true, data of existing entities will be modified
     * or added, but not deleted. For labels, descriptions, and aliases, this happens
     * by language: writing a new English alias replaces all previously existing
     * English aliases while other languages remain untouched. In contrast, adding
     * statements for a property does not delete existing statements of that property
     * (duplicates are even possible); a special JSON syntax exists for deleting
     * specific statements.
     *
     * See the <a href="https://www.wikidata.org/w/api.php?action=help&modules=wbeditentity">online
     * API documentation</a> for further information.
     *
     * TODO: There is currently no way to delete the label, description, or aliases
     * for a particular language without clearing all data; empty strings are not
     * accepted. One might achieve this by adapting the JSON serialization to produce
     * null values for such strings.
     *
     * @param id the id of the entity to be edited; if used, site and title must be null
     * @param site when selecting an entity by title, the site key (e.g. "enwiki");
     *        if used, title must also be given but id must be null
     * @param title string used to select an entity by title; if used, site must also
     *        be given but id must be null
     * @param newEntity used for creating a new entity of a given type; possible values
     *        include "item" and "property"; if used, id, site, and title must be null
     * @param data JSON representation of the data that is to be written; mandatory
     * @param clear if true, existing data will be cleared (deleted) before writing
     * @param bot if true, edits are flagged as "bot edits" when the logged-in user is
     *        in the bot group; for regular users the flag is ignored
     * @param baserevid the revision the edit refers to, or 0 to omit; when used, the
     *        site detects edit conflicts against this revision
     * @param summary summary for the edit; prepended by an automatically generated
     *        comment; autocomment plus summary is limited to 260 characters
     * @return the entity document built from the JSON response returned by the API
     * @throws IOException if there was an IO problem, such as a missing network connection
     * @throws MediaWikiApiErrorException if the API returns an error
     */
    public EntityDocument wbEditEntity(String id, String site, String title, String newEntity, String data,
            boolean clear, boolean bot, long baserevid, String summary)
            throws IOException, MediaWikiApiErrorException {
        Validate.notNull(data, "Data parameter cannot be null when editing entity data");
        Map<String, String> parameters = new HashMap<String, String>();
        parameters.put("data", data);
        if (clear) {
            // The API checks only for the presence of "clear", so an empty value suffices.
            parameters.put("clear", "");
        }
        // performAPIAction validates the id/site/title/newEntity combination and
        // attaches the summary, baserevid and bot flags to the request.
        JsonNode response = performAPIAction("wbeditentity", id, site, title, newEntity, parameters, summary,
                baserevid, bot);
        return getEntityDocumentFromResponse(response);
    }
}
public class Historical1SimpleDoubleAggPooledTopNScannerPrototype {
    /**
     * Any changes to this method should be coordinated with {@link TopNUtils}, {@link
     * PooledTopNAlgorithm#computeSpecializedScanAndAggregateImplementations} and downstream methods.
     *
     * It should be checked with a tool like https://github.com/AdoptOpenJDK/jitwatch that C2
     * compiler output for this method doesn't have any method calls in the while loop, i.e. all
     * method calls are inlined. To see assembly of this method in JITWatch and similar tools,
     * {@link PooledTopNAlgorithm#specializeHistorical1SimpleDoubleAggPooledTopN} should be turned
     * off; the benchmark should then be "naturally monomorphic". If the while loop contains
     * non-inlined method calls, it should be considered a performance bug.
     *
     * @return the number of rows processed
     */
    @Override
    public long scanAndAggregate(HistoricalDimensionSelector dimensionSelector,
            HistoricalColumnSelector metricSelector, SimpleDoubleBufferAggregator aggregator, int aggregatorSize,
            HistoricalCursor cursor, int[] positions, ByteBuffer resultsBuffer) {
        // See TopNUtils.copyOffset() for explanation
        Offset offset = (Offset) TopNUtils.copyOffset(cursor);
        long processedRows = 0;
        int positionToAllocate = 0;
        // Interruption is checked each row so a cancelled query stops promptly.
        while (offset.withinBounds() && !Thread.currentThread().isInterrupted()) {
            int rowNum = offset.getOffset();
            double metric = metricSelector.getDouble(rowNum);
            final IndexedInts dimValues = dimensionSelector.getRow(rowNum);
            final int dimSize = dimValues.size();
            for (int i = 0; i < dimSize; i++) {
                int dimIndex = dimValues.get(i);
                int position = positions[dimIndex];
                if (position >= 0) {
                    // An aggregation slot was already allocated for this dimension value.
                    aggregator.aggregate(resultsBuffer, position, metric);
                } else if (position == TopNAlgorithm.INIT_POSITION_VALUE) {
                    // First sighting of this dimension value: allocate the next slot
                    // in the results buffer and initialize it with this metric.
                    positions[dimIndex] = positionToAllocate;
                    aggregator.putFirst(resultsBuffer, positionToAllocate, metric);
                    positionToAllocate += aggregatorSize;
                }
                // Any other (negative, non-INIT) position sentinel is deliberately skipped.
            }
            processedRows++;
            offset.increment();
        }
        return processedRows;
    }
}
public class AsciiSet { /** * Returns a new set that will match characters that are not included this set . */ public AsciiSet invert ( ) { } }
final boolean [ ] invertMembers = new boolean [ 128 ] ; for ( int i = 0 ; i < invertMembers . length ; ++ i ) { invertMembers [ i ] = ! members [ i ] ; } return new AsciiSet ( invertMembers ) ;
public class ResourcePath { /** * Assumes { @ code path } is already normalized via { @ link # normalizePath ( String ) } . * @ param path * Path from which components are extracted * @ param strategy * How to perform the extraction ( literal or look for placeholders ) * @ return Logical components of the supplied { @ code path } */ protected List < Component > extractComponents ( String path , ComponentParsingStrategy strategy ) { } }
requireNonNull ( path ) ; requireNonNull ( strategy ) ; if ( "/" . equals ( path ) ) return emptyList ( ) ; // Strip off leading / path = path . substring ( 1 ) ; List < String > values = asList ( path . split ( "/" ) ) ; boolean checkForPlaceholder = strategy == ComponentParsingStrategy . FROM_DECLARATION ; return values . stream ( ) . map ( value -> { if ( checkForPlaceholder ) { ComponentType type = ComponentType . LITERAL ; if ( checkForPlaceholder && COMPONENT_PLACEHOLDER_PATTERN . matcher ( value ) . matches ( ) ) { type = ComponentType . PLACEHOLDER ; value = value . substring ( 1 , value . length ( ) - 1 ) ; } return new Component ( value , type ) ; } else { return new Component ( value , ComponentType . LITERAL ) ; } } ) . collect ( toList ( ) ) ;
public class Type { /** * < p > newInstanceUsingCoercion . < / p > * @ param args a { @ link java . lang . String } object . * @ return a T object . * @ throws java . lang . Throwable if any . */ public T newInstanceUsingCoercion ( String ... args ) throws Throwable { } }
Constructor < ? extends T > constructor = ClassUtils . findPossibleConstructor ( klass , args ) ; Class [ ] types = constructor . getParameterTypes ( ) ; return newInstance ( TypeConversion . convert ( args , types ) ) ;
public class AbstractWComponent { /** * { @ inheritDoc } */ @ Override public Environment getEnvironment ( ) { } }
UIContext uic = UIContextHolder . getCurrent ( ) ; return uic == null ? null : uic . getEnvironment ( ) ;
public class AESFastEngine {
    /**
     * The following defines provide alternative definitions of FFmulX that might
     * give improved performance if a fast 32-bit multiply is not available.
     *
     * private int FFmulX(int x) { int u = x & m1; u |= (u >> 1); return ((x & m2) << 1) ^ ((u >>> 3) | (u >>> 6)); }
     * private static final int m4 = 0x1b1b1b1b;
     * private int FFmulX(int x) { int u = x & m1; return ((x & m2) << 1) ^ ((u - (u >>> 7)) & m4); }
     */
    private int inv_mcol(int x) {
        // f2, f4, f8 are x multiplied by 2, 4 and 8 respectively via repeated FFmulX
        // (each call doubles all four packed bytes in the finite field at once).
        int f2 = FFmulX(x);
        int f4 = FFmulX(f2);
        int f8 = FFmulX(f4);
        // f9 = x * 9 (i.e. x ^ 8x).
        int f9 = x ^ f8;
        // Combine the multiples with byte rotations (shift) so all four bytes of the
        // column are transformed in parallel — presumably the AES InvMixColumns step
        // with coefficients {0e,0b,0d,09}; verify against FIPS-197 if modifying.
        return f2 ^ f4 ^ f8 ^ shift(f2 ^ f9, 8) ^ shift(f4 ^ f9, 16) ^ shift(f9, 24);
    }
}
public class GDLLoader { /** * Initializes a new vertex from a given vertex context . * @ param vertexContext vertex context * @ return new vertex */ private Vertex initNewVertex ( GDLParser . VertexContext vertexContext ) { } }
Vertex v = new Vertex ( ) ; v . setId ( getNewVertexId ( ) ) ; List < String > labels = getLabels ( vertexContext . header ( ) ) ; v . setLabels ( labels . isEmpty ( ) ? useDefaultVertexLabel ? Collections . singletonList ( defaultVertexLabel ) : Collections . emptyList ( ) : labels ) ; v . setProperties ( getProperties ( vertexContext . properties ( ) ) ) ; return v ;
public class Configuration {
    /**
     * Sets the name of the cluster in which the current server resides.
     *
     * @param name The name of the WAS cluster
     */
    public static final void clusterName(String name) {
        // Standard WAS entry/exit tracing around the assignment; the enabled check
        // is repeated so tracing can be toggled between entry and exit.
        if (tc.isEntryEnabled()) Tr.entry(tc, "clusterName", name);
        _clusterName = name;
        if (tc.isEntryEnabled()) Tr.exit(tc, "clusterName");
    }
}
public class SnapshotsInner {
    /**
     * Revokes access to a snapshot.
     *
     * @param resourceGroupName The name of the resource group.
     * @param snapshotName The name of the snapshot that is being created. The name can't be
     *        changed after the snapshot is created. Supported characters for the name are
     *        a-z, A-Z, 0-9 and _. The max name length is 80 characters.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the OperationStatusResponseInner object
     */
    public Observable<OperationStatusResponseInner> beginRevokeAccessAsync(String resourceGroupName,
            String snapshotName) {
        // Delegate to the ServiceResponse-producing variant and unwrap each response
        // to its body so callers see only the operation status payload.
        return beginRevokeAccessWithServiceResponseAsync(resourceGroupName, snapshotName)
                .map(new Func1<ServiceResponse<OperationStatusResponseInner>, OperationStatusResponseInner>() {
                    @Override
                    public OperationStatusResponseInner call(ServiceResponse<OperationStatusResponseInner> response) {
                        return response.body();
                    }
                });
    }
}
public class FileBufferedDataOutputStream { /** * / * ( non - Javadoc ) * @ see java . io . OutputStream # write ( byte [ ] , int , int ) */ @ Override public void write ( byte [ ] b , int off , int len ) throws IOException { } }
int wbytes = len ; int offset = off ; while ( wbytes > 0 ) { if ( buf . position ( ) >= buf . capacity ( ) ) { flush ( ) ; } int length = Math . min ( wbytes , buf . limit ( ) - buf . position ( ) ) ; buf . put ( b , offset , length ) ; offset += length ; wbytes -= length ; }
public class AddResourcesListener {
    /**
     * Looks for the header in the JSF tree.
     *
     * @param root The root of the JSF tree.
     * @return null, if the head couldn't be found.
     */
    private UIComponent findHeader(UIViewRoot root) {
        // First pass: an explicit h:head component wins outright.
        for (UIComponent c : root.getChildren()) {
            if (c instanceof HtmlHead) return c;
        }
        // Second pass: no HtmlHead present. Give up once the body is reached;
        // before that, accept a UIOutput that carries facets — presumably a head
        // rendered as plain output. NOTE(review): confirm this heuristic matches
        // the JSF tree shapes this listener is expected to handle.
        for (UIComponent c : root.getChildren()) {
            if (c instanceof HtmlBody) return null;
            if (c instanceof UIOutput) if (c.getFacets() != null) return c;
        }
        return null;
    }
}
public class ApplicationClassLoaderFactory {
    /**
     * Returns the class loader for the given jar file; for a fixed jar path the
     * same loader instance is always returned.
     *
     * @param jarFilePath local file path of the jar
     * @return the class loader associated with the jar
     */
    public static ApplicationClassLoader getJarApplicationClassLoader(String jarFilePath) {
        // Fast path: read without locking. NOTE(review): this is only safe if
        // jarApplicationClassLoaderCache is a concurrent map — confirm its declared type.
        ApplicationClassLoader jarApplicationClassLoader = jarApplicationClassLoaderCache.get(jarFilePath);
        if (jarApplicationClassLoader != null) {
            return jarApplicationClassLoader;
        }
        synchronized (jarApplicationClassLoaderCache) {
            // Re-check under the lock so at most one loader is created per jar path.
            jarApplicationClassLoader = jarApplicationClassLoaderCache.get(jarFilePath);
            if (jarApplicationClassLoader != null) {
                return jarApplicationClassLoader;
            }
            // New loaders are parented to the node-level loader and seeded with the jar.
            jarApplicationClassLoader = new ApplicationClassLoader(nodeApplicationClassLoader, false);
            jarApplicationClassLoader.addJarFiles(jarFilePath);
            jarApplicationClassLoaderCache.put(jarFilePath, jarApplicationClassLoader);
            return jarApplicationClassLoader;
        }
    }
}
public class Shell { /** * Executes a system command with arguments and returns the output . * @ param command command to be executed * @ param encoding encoding to be used * @ return command output * @ throws IOException on any error */ public final String executeSystemCommandAndGetOutput ( final String [ ] command , final String encoding ) throws IOException { } }
Process p = Runtime . getRuntime ( ) . exec ( command ) ; StreamManager sm = new StreamManager ( ) ; try { InputStream input = sm . handle ( p . getInputStream ( ) ) ; StringBuffer lines = new StringBuffer ( ) ; String line ; BufferedReader in = ( BufferedReader ) sm . handle ( new BufferedReader ( new InputStreamReader ( input , encoding ) ) ) ; while ( ( line = in . readLine ( ) ) != null ) { lines . append ( line ) . append ( '\n' ) ; } return lines . toString ( ) ; } finally { sm . closeAll ( ) ; }
public class OperatorStartFuture { /** * Invokes the asynchronous function , surfacing the result through an observable sequence . * < em > Important note < / em > subscribing to the resulting observable blocks until * the future completes . * @ param < T > the result type * @ param functionAsync the asynchronous function to run * @ return the observable */ public static < T > Observable < T > startFuture ( Func0 < ? extends Future < ? extends T > > functionAsync ) { } }
Future < ? extends T > task ; try { task = functionAsync . call ( ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } return Observable . from ( task ) ;
public class ProxyArtifactStore {
    /**
     * {@inheritDoc}
     *
     * Resolves the artifact through Maven (remote repositories + local repository)
     * and reports the last-modified time of the resolved file.
     */
    public long getLastModified(Artifact artifact) throws IOException, ArtifactNotFoundException {
        // Translate our Artifact into a Maven artifact, using the timestamped
        // version so snapshot builds resolve to a concrete file.
        org.apache.maven.artifact.Artifact mavenArtifact = artifactFactory.createArtifactWithClassifier(
                artifact.getGroupId(), artifact.getArtifactId(), artifact.getTimestampVersion(), artifact.getType(),
                artifact.getClassifier());
        try {
            artifactResolver.resolve(mavenArtifact, remoteRepositories, localRepository);
            final File file = mavenArtifact.getFile();
            if (file != null && file.isFile()) {
                // Remember the successful resolution before reporting the timestamp.
                addResolved(artifact);
                return file.lastModified();
            }
            // Resolution "succeeded" but produced no usable file.
            throw new ArtifactNotFoundException(artifact);
        } catch (org.apache.maven.artifact.resolver.ArtifactNotFoundException e) {
            // Translate Maven's not-found exception into our own, preserving the cause.
            ArtifactNotFoundException anfe = new ArtifactNotFoundException(artifact);
            anfe.initCause(e);
            throw anfe;
        } catch (ArtifactResolutionException e) {
            // Resolution infrastructure failures surface as IOException per the contract.
            IOException ioe = new IOException(e.getMessage());
            ioe.initCause(e);
            throw ioe;
        }
    }
}
public class JNIWriter { /** * Get the ClassWriter instance for this context . */ public static JNIWriter instance ( Context context ) { } }
JNIWriter instance = context . get ( jniWriterKey ) ; if ( instance == null ) instance = new JNIWriter ( context ) ; return instance ;
public class MimeTypeInfoManager { /** * Get the primary ( = first ) mime type associated with the specified filename . * @ param sFilename * The filename to retrieve the primary mime type from . May neither be * < code > null < / code > nor empty . * @ return < code > null < / code > if no mime type is associated with the extension * of the passed filename */ @ Nullable public IMimeType getPrimaryMimeTypeForFilename ( @ Nonnull @ Nonempty final String sFilename ) { } }
ValueEnforcer . notEmpty ( sFilename , "Filename" ) ; final String sExtension = FilenameHelper . getExtension ( sFilename ) ; return getPrimaryMimeTypeForExtension ( sExtension ) ;
public class GosuStringUtil { /** * < p > Removes < code > separator < / code > from the end of * < code > str < / code > if it ' s there , otherwise leave it alone . < / p > * < p > NOTE : This method changed in version 2.0. * It now more closely matches Perl chomp . * For the previous behavior , use { @ link # substringBeforeLast ( String , String ) } . * This method uses { @ link String # endsWith ( String ) } . < / p > * < pre > * GosuStringUtil . chomp ( null , * ) = null * GosuStringUtil . chomp ( " " , * ) = " " * GosuStringUtil . chomp ( " foobar " , " bar " ) = " foo " * GosuStringUtil . chomp ( " foobar " , " baz " ) = " foobar " * GosuStringUtil . chomp ( " foo " , " foo " ) = " " * GosuStringUtil . chomp ( " foo " , " foo " ) = " foo " * GosuStringUtil . chomp ( " foo " , " foo " ) = " " * GosuStringUtil . chomp ( " foo " , " foooo " ) = " foo " * GosuStringUtil . chomp ( " foo " , " " ) = " foo " * GosuStringUtil . chomp ( " foo " , null ) = " foo " * < / pre > * @ param str the String to chomp from , may be null * @ param separator separator String , may be null * @ return String without trailing separator , < code > null < / code > if null String input */ public static String chomp ( String str , String separator ) { } }
if ( isEmpty ( str ) || separator == null ) { return str ; } if ( str . endsWith ( separator ) ) { return str . substring ( 0 , str . length ( ) - separator . length ( ) ) ; } return str ;
public class AwsSecurityFindingFilters { /** * The updated record state for the finding . * @ param recordState * The updated record state for the finding . */ public void setRecordState ( java . util . Collection < StringFilter > recordState ) { } }
if ( recordState == null ) { this . recordState = null ; return ; } this . recordState = new java . util . ArrayList < StringFilter > ( recordState ) ;
public class DocumentLine {
    /**
     * Indexed getter for endings — gets an indexed value.
     *
     * @generated
     * @param i index in the array to get
     * @return value of the element at index i
     */
    public float getEndings(int i) {
        // UIMA-generated accessor: verify the 'endings' feature exists on this type...
        if (DocumentLine_Type.featOkTst && ((DocumentLine_Type) jcasType).casFeat_endings == null)
            jcasType.jcas.throwFeatMissing("endings", "ch.epfl.bbp.uima.types.DocumentLine");
        // ...bounds-check the index against the underlying CAS float array...
        jcasType.jcas.checkArrayBounds(
                jcasType.ll_cas.ll_getRefValue(addr, ((DocumentLine_Type) jcasType).casFeatCode_endings), i);
        // ...then read the element through the low-level CAS API.
        return jcasType.ll_cas.ll_getFloatArrayValue(
                jcasType.ll_cas.ll_getRefValue(addr, ((DocumentLine_Type) jcasType).casFeatCode_endings), i);
    }
}
public class DirectoryScanner { /** * Set the base directory to be scanned . This is the directory which is * scanned recursively . All ' / ' and ' \ ' characters are replaced by * < code > File . separatorChar < / code > , so the separator used need not match * < code > File . separatorChar < / code > . * @ param basedir The base directory to scan . */ public DirectoryScanner setBasedir ( String basedir ) { } }
setBasedir ( basedir == null ? null : new File ( basedir . replace ( '/' , File . separatorChar ) . replace ( '\\' , File . separatorChar ) ) ) ; return this ;
public class HttpClientUtil { /** * 初始化httpclient对象 */ private static void buildHttpClient ( ) { } }
RequestConfig globalConfig = RequestConfig . custom ( ) . setConnectTimeout ( 5000 ) . setSocketTimeout ( 5000 ) . build ( ) ; CloseableHttpClient httpclient = HttpClients . custom ( ) . setKeepAliveStrategy ( new HttpClientKeepAliveStrategy ( ) ) . setDefaultRequestConfig ( globalConfig ) . build ( ) ; HttpClientUtil . httpclient = httpclient ;
public class ResourceList { /** * Find duplicate resource paths within this { @ link ResourceList } . * @ return A { @ link List } of { @ link Entry } objects for all resources in the classpath and / or module path that * have a non - unique path ( i . e . where there are at least two resources with the same path ) . The key of * each returned { @ link Entry } is the path ( obtained from { @ link Resource # getPath ( ) } ) , and the value is * a { @ link ResourceList } of at least two unique { @ link Resource } objects that have that path . */ public List < Entry < String , ResourceList > > findDuplicatePaths ( ) { } }
final List < Entry < String , ResourceList > > duplicatePaths = new ArrayList < > ( ) ; for ( final Entry < String , ResourceList > pathAndResourceList : asMap ( ) . entrySet ( ) ) { // Find ResourceLists with two or more entries if ( pathAndResourceList . getValue ( ) . size ( ) > 1 ) { duplicatePaths . add ( new SimpleEntry < > ( pathAndResourceList . getKey ( ) , pathAndResourceList . getValue ( ) ) ) ; } } CollectionUtils . sortIfNotEmpty ( duplicatePaths , new Comparator < Entry < String , ResourceList > > ( ) { @ Override public int compare ( final Entry < String , ResourceList > o1 , final Entry < String , ResourceList > o2 ) { // Sort in lexicographic order of path return o1 . getKey ( ) . compareTo ( o2 . getKey ( ) ) ; } } ) ; return duplicatePaths ;
public class DataBlockEngine { /** * 追加一个键值对 */ public DataAppendResult append ( StoreTxLogPosition storeTxLogPosition , K key , V value ) { } }
UnsafeByteArrayOutputStream out = new UnsafeByteArrayOutputStream ( ) ; try { DataEntry < K , V > dataEntry = new DataEntry < K , V > ( key , value ) ; serializer . serialize ( dataEntry , out ) ; return append ( storeTxLogPosition , out . toByteArray ( ) ) ; } catch ( Exception e ) { throw new DBException ( "Persistent data error: " + e . getMessage ( ) , e ) ; } finally { try { out . close ( ) ; } catch ( IOException ignored ) { } }
public class TasksModel { /** * < p > Returns the URI for the remote database , based on the app ' s * configuration . < / p > * @ return the remote database ' s URI * @ throws URISyntaxException if the settings give an invalid URI */ private URI createServerURI ( ) throws URISyntaxException { } }
// We store this in plain text for the purposes of simple demonstration , // you might want to use something more secure . SharedPreferences sharedPref = PreferenceManager . getDefaultSharedPreferences ( this . mContext ) ; String username = sharedPref . getString ( TodoActivity . SETTINGS_CLOUDANT_USER , "" ) ; String dbName = sharedPref . getString ( TodoActivity . SETTINGS_CLOUDANT_DB , "" ) ; String apiKey = sharedPref . getString ( TodoActivity . SETTINGS_CLOUDANT_API_KEY , "" ) ; String apiSecret = sharedPref . getString ( TodoActivity . SETTINGS_CLOUDANT_API_SECRET , "" ) ; String host = username + ".cloudant.com" ; // We recommend always using HTTPS to talk to Cloudant . return new URI ( "https" , apiKey + ":" + apiSecret , host , 443 , "/" + dbName , null , null ) ;
public class SyndFeedImpl { /** * Creates a real feed containing the information of the SyndFeedImpl . * @ param feedType the feed type for the WireFeed to be created . * @ return the real feed . */ @ Override public WireFeed createWireFeed ( final String feedType ) { } }
if ( feedType == null ) { throw new IllegalArgumentException ( "Feed type cannot be null" ) ; } final Converter converter = CONVERTERS . getConverter ( feedType ) ; if ( converter == null ) { throw new IllegalArgumentException ( "Invalid feed type [" + feedType + "]" ) ; } return converter . createRealFeed ( this ) ;
public class MSNumpress {
    /**
     * Encodes the doubles in data by first using a
     * - lossy conversion to a 4 byte, 5 decimal fixed point representation
     * - storing the residuals from a linear prediction after the first two values
     * - encoding by encodeInt (see above)
     *
     * The resulting binary is maximally 8 + dataSize * 5 bytes, but much less if the data is
     * reasonably smooth on the first order.
     *
     * This encoding is suitable for typical m/z or retention time binary arrays. On a test set,
     * the encoding was empirically shown to be accurate to at least 0.002 ppm.
     *
     * @param data array of doubles to be encoded
     * @param dataSize number of doubles from data to encode
     * @param result array where resulting bytes should be stored
     * @param fixedPoint the scaling factor used for getting the fixed point repr. This is stored
     *        in the binary and automatically extracted on decoding.
     * @return the number of encoded bytes
     */
    public static int encodeLinear ( double [ ] data , int dataSize , byte [ ] result , double fixedPoint ) {
        long [ ] ints = new long [ 3 ] ;
        int i , ri , halfByteCount , hbi ;
        byte halfBytes [ ] = new byte [ 10 ] ;
        long extrapol , diff ;

        // The fixed point itself occupies the first 8 bytes of the output.
        encodeFixedPoint ( fixedPoint , result ) ;
        if ( dataSize == 0 ) {
            return 8 ;
        }

        // First value: stored verbatim as a 4-byte little-endian fixed-point int
        // (+0.5 rounds to nearest; assumes non-negative inputs — typical for m/z).
        ints [ 1 ] = ( long ) ( data [ 0 ] * fixedPoint + 0.5 ) ;
        for ( i = 0 ; i < 4 ; i ++ ) {
            result [ 8 + i ] = ( byte ) ( ( ints [ 1 ] >> ( i * 8 ) ) & 0xff ) ;
        }
        if ( dataSize == 1 ) {
            return 12 ;
        }

        // Second value: also stored verbatim (the linear predictor needs two seeds).
        ints [ 2 ] = ( long ) ( data [ 1 ] * fixedPoint + 0.5 ) ;
        for ( i = 0 ; i < 4 ; i ++ ) {
            result [ 12 + i ] = ( byte ) ( ( ints [ 2 ] >> ( i * 8 ) ) & 0xff ) ;
        }

        halfByteCount = 0 ;
        ri = 16 ;

        for ( i = 2 ; i < dataSize ; i ++ ) {
            // Shift the 3-value window: [0]=prev-prev, [1]=prev, [2]=current.
            ints [ 0 ] = ints [ 1 ] ;
            ints [ 1 ] = ints [ 2 ] ;
            ints [ 2 ] = ( long ) ( data [ i ] * fixedPoint + 0.5 ) ;
            // Linear extrapolation from the previous two fixed-point values;
            // only the (usually small) residual is entropy-coded by encodeInt.
            extrapol = ints [ 1 ] + ( ints [ 1 ] - ints [ 0 ] ) ;
            diff = ints [ 2 ] - extrapol ;
            halfByteCount += encodeInt ( diff , halfBytes , halfByteCount ) ;

            // Pack accumulated half-bytes pairwise into whole output bytes.
            for ( hbi = 1 ; hbi < halfByteCount ; hbi += 2 ) {
                result [ ri ++ ] = ( byte ) ( ( halfBytes [ hbi - 1 ] << 4 ) | ( halfBytes [ hbi ] & 0xf ) ) ;
            }
            // Carry a dangling half-byte over to the next iteration.
            if ( halfByteCount % 2 != 0 ) {
                halfBytes [ 0 ] = halfBytes [ halfByteCount - 1 ] ;
                halfByteCount = 1 ;
            } else {
                halfByteCount = 0 ;
            }
        }
        // Flush a final dangling half-byte, zero-padded in the low nibble.
        if ( halfByteCount == 1 ) {
            result [ ri ++ ] = ( byte ) ( halfBytes [ 0 ] << 4 ) ;
        }
        return ri ;
    }
}
public class ServletHelper {
    /**
     * Work around an exception that can occur in Jetty 9.3.13:
     * <pre>
     * java.lang.NullPointerException: null
     *   at org.eclipse.jetty.server.Request.getQueryString(Request.java:1119)
     *   at com.helger.web.servlet.request.RequestHelper.getURL(RequestHelper.java:340)
     * </pre>
     *
     * @param aRequest
     *        Source request. May be <code>null</code>.
     * @return <code>null</code> if request is <code>null</code> or if no query
     *         string could be determined, or if none is present
     */
    @ Nullable
    public static String getRequestQueryString ( @ Nullable final HttpServletRequest aRequest ) {
        String ret = null ;
        if ( aRequest != null )
            try {
                // For a started async request the original query string is stashed
                // in the async-context attribute rather than on the request itself.
                if ( aRequest . isAsyncSupported ( ) && aRequest . isAsyncStarted ( ) )
                    ret = ( String ) aRequest . getAttribute ( AsyncContext . ASYNC_QUERY_STRING ) ;
                else
                    ret = aRequest . getQueryString ( ) ;
            } catch ( final Exception ex ) {
                // fall through
                // Deliberately swallow (the Jetty NPE above); optionally log it.
                if ( isLogExceptions ( ) )
                    if ( LOGGER . isWarnEnabled ( ) )
                        LOGGER . warn ( "[ServletHelper] Failed to determine query string of HTTP request" , ex ) ;
            }
        return ret ;
    }
}
public class Elements {
    /**
     * Returns a function that gets the value of a selected element.
     *
     * @param <E> the type of the value held by the selected element
     * @return a function that gets the value of a selected element.
     */
    public static < E > OptionalFunction < Selection < Element > , E > getValue ( ) {
        // Unwrap the selection to its single result and read that element's value.
        return OptionalFunction . of ( selection -> selection . result ( ) . getValue ( ) ) ;
    }
}
public class KCVSLog {
    /**
     * Sends a batch of messages by persisting them to the storage backend.
     *
     * All envelopes are written in a single backend transaction; on success every
     * message is marked delivered, on failure every message is marked failed and
     * the exception is rethrown.
     *
     * @param msgEnvelopes the batch of message envelopes to persist
     */
    private void sendMessages ( final List < MessageEnvelope > msgEnvelopes ) {
        try {
            boolean success = BackendOperation . execute ( new BackendOperation . Transactional < Boolean > ( ) {
                @ Override
                public Boolean call ( StoreTransaction txh ) throws BackendException {
                    // Group entries by their partition key.
                    ListMultimap < StaticBuffer , Entry > mutations = ArrayListMultimap . create ( ) ;
                    for ( MessageEnvelope env : msgEnvelopes ) {
                        mutations . put ( env . key , env . entry ) ;
                        long ts = env . entry . getColumn ( ) . getLong ( 0 ) ;
                        log . debug ( "Preparing to write {} to storage with column/timestamp {}" , env , times . getTime ( ts ) ) ;
                    }
                    // Build one additive KCVMutation per key (no deletions).
                    Map < StaticBuffer , KCVMutation > muts = new HashMap < StaticBuffer , KCVMutation > ( mutations . keySet ( ) . size ( ) ) ;
                    for ( StaticBuffer key : mutations . keySet ( ) ) {
                        muts . put ( key , new KCVMutation ( mutations . get ( key ) , KeyColumnValueStore . NO_DELETIONS ) ) ;
                        log . debug ( "Built mutation on key {} with {} additions" , key , mutations . get ( key ) . size ( ) ) ;
                    }
                    // Apply all mutations for this store in one backend call.
                    manager . storeManager . mutateMany ( ImmutableMap . of ( store . getName ( ) , muts ) , txh ) ;
                    log . debug ( "Wrote {} total envelopes with operation timestamp {}" , msgEnvelopes . size ( ) , txh . getConfiguration ( ) . getCommitTime ( ) ) ;
                    return Boolean . TRUE ;
                }

                @ Override
                public String toString ( ) {
                    return "messageSending" ;
                }
            } , this , times , maxWriteTime ) ;
            Preconditions . checkState ( success ) ;
            log . debug ( "Wrote {} messages to backend" , msgEnvelopes . size ( ) ) ;
            // Success: notify delivery for every message in the batch.
            for ( MessageEnvelope msgEnvelope : msgEnvelopes )
                msgEnvelope . message . delivered ( ) ;
        } catch ( TitanException e ) {
            // Failure: mark every message failed, then propagate to the caller.
            for ( MessageEnvelope msgEnvelope : msgEnvelopes )
                msgEnvelope . message . failed ( e ) ;
            throw e ;
        }
    }
}
public class ProxySettings {
    /**
     * Add an additional HTTP header passed to the proxy server.
     *
     * @param name
     *        The name of an HTTP header (case-insensitive).
     *        If {@code null} or an empty string is given,
     *        nothing is added.
     * @param value
     *        The value of the HTTP header.
     * @return
     *        {@code this} object.
     */
    public ProxySettings addHeader ( String name , String value ) {
        // Silently ignore a null/empty header name, per the contract above.
        if ( name == null || name . length ( ) == 0 ) {
            return this ;
        }
        // NOTE(review): the case-insensitivity promised in the javadoc only holds
        // if mHeaders uses a case-insensitive comparator — confirm its declaration.
        List < String > list = mHeaders . get ( name ) ;
        if ( list == null ) {
            // First value for this header name: create its value list.
            list = new ArrayList < String > ( ) ;
            mHeaders . put ( name , list ) ;
        }
        list . add ( value ) ;
        return this ;
    }
}
public class GVRAndroidResource { /** * Restore the stream position , to the point set by a previous * { @ link # mark ( ) mark ( ) . } * Please note that calling { @ link # reset ( ) } generally ' consumes ' the * { @ link # mark ( ) } - < em > do not < / em > call * < pre > * mark ( ) ; * reset ( ) ; * reset ( ) ; * < / pre > * @ throws IOException * @ since 1.6.7 */ private void reset ( ) throws IOException { } }
if ( streamState == StreamStates . OPEN ) { if ( stream . markSupported ( ) ) { stream . reset ( ) ; } else { // In case a inputStream ( e . g . , fileInputStream ) doesn ' t support // mark , throw a exception throw new IOException ( "Input stream doesn't support mark" ) ; } }
public class JSPtoPRealization {
    /**
     * Method dereferenceLocalisation
     *
     * <p>Called back by a PtoPMessageItemStream when the Transaction containing it commits.
     * Removes the localisation and its associated OutputHandler from the destination.
     *
     * @param ptoPMessageItemStream The localisation to dereference.
     */
    @ Override
    public void dereferenceLocalisation ( LocalizationPoint ptoPMessageItemStream ) {
        if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) )
            SibTr . entry ( tc , "dereferenceLocalisation" , new Object [ ] { ptoPMessageItemStream , this } ) ;

        _localisationManager . dereferenceLocalisation ( ptoPMessageItemStream ) ;

        // Reset the reference to the local messages itemstream if it is being removed .
        // (i.e. when the stream is localized on this messaging engine.)
        if ( ptoPMessageItemStream . getLocalizingMEUuid ( ) . equals ( _messageProcessor . getMessagingEngineUuid ( ) ) ) {
            _pToPLocalMsgsItemStream = null ;
        }

        if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) )
            SibTr . exit ( tc , "dereferenceLocalisation" ) ;
    }
}
public class ConfigurationAsyncClient { /** * Attempts to get a ConfigurationSetting that matches the { @ code key } . * @ param key The key of the setting to retrieve . * @ return The { @ link ConfigurationSetting } stored in the service , or { @ code null } , if the configuration value does * not exist or the key is an invalid value ( which will also throw ServiceRequestException described below ) . * @ throws IllegalArgumentException If { @ code key } is { @ code null } . * @ throws ServiceRequestException If the { @ code key } and { @ code label } does not exist . Or , if { @ code key } is an * empty string . */ public Mono < Response < ConfigurationSetting > > getSetting ( String key ) { } }
return getSetting ( new ConfigurationSetting ( ) . key ( key ) ) ;
public class ApptentiveNotificationCenter { /** * Creates a notification with a given name and user info and posts it to the receiver . */ public synchronized void postNotification ( final String name , final Map < String , Object > userInfo ) { } }
ApptentiveLog . v ( NOTIFICATIONS , "Post notification: name=%s userInfo={%s}" , name , StringUtils . toString ( userInfo ) ) ; final ApptentiveNotificationObserverList list = findObserverList ( name ) ; if ( list != null ) { list . notifyObservers ( new ApptentiveNotification ( name , userInfo ) ) ; }
public class JvmAnyTypeReferenceImpl {
    /**
     * Sets the referenced {@link JvmType}, emitting an EMF SET notification
     * (old value, new value) when any adapters are listening.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setType ( JvmType newType ) {
        JvmType oldType = type ;
        type = newType ;
        // Standard EMF pattern: only build the notification when required.
        if ( eNotificationRequired ( ) )
            eNotify ( new ENotificationImpl ( this , Notification . SET , TypesPackage . JVM_ANY_TYPE_REFERENCE__TYPE , oldType , type ) ) ;
    }
}
public class Smb2TreeConnectResponse { /** * { @ inheritDoc } * @ see jcifs . internal . smb2 . ServerMessageBlock2Response # prepare ( jcifs . internal . CommonServerMessageBlockRequest ) */ @ Override public void prepare ( CommonServerMessageBlockRequest next ) { } }
if ( isReceived ( ) ) { ( ( ServerMessageBlock2 ) next ) . setTreeId ( getTreeId ( ) ) ; } super . prepare ( next ) ;
public class ApplicationTypeImpl { /** * If not already created , a new < code > jms - destination < / code > element will be created and returned . * Otherwise , the first existing < code > jms - destination < / code > element will be returned . * @ return the instance defined for the element < code > jms - destination < / code > */ public JmsDestinationType < ApplicationType < T > > getOrCreateJmsDestination ( ) { } }
List < Node > nodeList = childNode . get ( "jms-destination" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new JmsDestinationTypeImpl < ApplicationType < T > > ( this , "jms-destination" , childNode , nodeList . get ( 0 ) ) ; } return createJmsDestination ( ) ;
public class AntClassLoader {
    /**
     * Loads a class through this class loader but defer to the parent class loader.
     *
     * This ensures that instances of the returned class will be compatible with instances which
     * have already been loaded on the parent loader.
     *
     * @param classname The name of the class to be loaded. Must not be <code>null</code>.
     * @return the required Class object
     * @exception ClassNotFoundException if the requested class does not exist on this loader's
     *            classpath.
     */
    public Class forceLoadSystemClass ( String classname ) throws ClassNotFoundException {
        log ( "force system loading " + classname , Project . MSG_DEBUG ) ;
        // Reuse the class if this loader has already loaded it.
        Class theClass = findLoadedClass ( classname ) ;
        if ( theClass == null ) {
            // Otherwise resolve it via the base (system/parent) lookup.
            theClass = findBaseClass ( classname ) ;
        }
        return theClass ;
    }
}
public class CssReader {
    /**
     * Read forward until the next non-escaped character matches the given
     * CharMatcher or is EOF.
     *
     * The terminating character is pushed back so the caller reads it next.
     *
     * @param matcher decides which character ends the scan
     * @return this reader, for chaining
     * @throws IOException if the underlying read fails
     */
    CssReader forward ( CharMatcher matcher ) throws IOException {
        while ( true ) {
            Mark mark = mark ( ) ;
            next ( ) ;
            // TODO escape awareness
            // Stop at EOF, or at a match not preceded by a backslash
            // (a preceding '\\' means the char is escaped and must be skipped).
            if ( curChar == - 1 || ( matcher . matches ( ( char ) curChar ) && prevChar != '\\' ) ) {
                // Unconsume the terminator so the caller can inspect it.
                unread ( curChar , mark ) ;
                break ;
            }
        }
        return this ;
    }
}
public class CrossOriginResourceSharingFilterCommand {
    /**
     * Initializes the wizard UI with sensible CORS-filter defaults and wires
     * the input controls into the builder.
     *
     * @param builder the UI builder receiving this command's inputs
     * @throws Exception propagated from the superclass initialization
     */
    @ Override
    public void initializeUI ( UIBuilder builder ) throws Exception {
        super . initializeUI ( builder ) ;
        // Default class name for the generated filter.
        getNamed ( ) . setDefaultValue ( "NewCrossOriginResourceSharingFilter" ) ;
        // Headers commonly needed by browser clients.
        accessControlAllowHeaders . setValue ( Arrays . asList ( "Content-Type" , "User-Agent" , "X-Requested-With" , "X-Requested-By" , "Cache-Control" ) ) ;
        // Offer all common verbs as choices ...
        accessControlAllowMethods . setValueChoices ( Arrays . asList ( HttpMethod . GET , HttpMethod . POST , HttpMethod . PUT , HttpMethod . DELETE , HttpMethod . HEAD , HttpMethod . OPTIONS ) ) ;
        // ... but pre-select only the four typical CRUD methods.
        accessControlAllowMethods . setValue ( Arrays . asList ( HttpMethod . GET , HttpMethod . POST , HttpMethod . PUT , HttpMethod . DELETE ) ) ;
        builder . add ( accessControlAllowMethods ) . add ( accessControlAllowHeaders ) . add ( accessControlAllowOrigin ) . add ( accessControlAllowCredentials ) ;
    }
}
public class NGram { /** * 提取ngram * @ param data * @ param gramSizes * @ param list */ private static void ngramOnCharacter ( String data , int [ ] gramSizes , Collection < String > list ) { } }
data = data . replaceAll ( "\\s+" , "" ) ; for ( int j = 0 ; j < gramSizes . length ; j ++ ) { int len = gramSizes [ j ] ; if ( len <= 0 || len > data . length ( ) ) continue ; for ( int i = 0 ; i < data . length ( ) - len ; i ++ ) { list . add ( data . substring ( i , i + len ) ) ; } }
public class TransitionBuilder {
    /**
     * Finds the associate IF for an ELSE_IF, ELSE or END_IF pointer.
     *
     * Walks backwards through the row chain until a row of type IF is found.
     *
     * @param pointer the starting node (an ELSE_IF, ELSE or END_IF row)
     * @return the node holding the matching IF row
     */
    private Tree < Row > findIf ( Tree < Row > pointer ) {
        // NOTE(review): assumes a preceding IF always exists; if callers ever pass
        // an unmatched pointer, getPrevious() would eventually return null and
        // this loop would NPE — confirm the invariant at the call sites.
        while ( ! Type . IF . equals ( pointer . getContent ( ) . getType ( ) ) ) {
            pointer = pointer . getPrevious ( ) ;
        }
        return pointer ;
    }
}
public class MeasureToMeasureDto {
    /**
     * return the numerical value as a double. It's the type used in db.
     * Returns null if no numerical value found
     */
    @ CheckForNull
    private static Double valueAsDouble ( Measure measure ) {
        switch ( measure . getValueType ( ) ) {
            case BOOLEAN :
                // true -> 1.0, false -> 0.0
                return measure . getBooleanValue ( ) ? 1.0d : 0.0d ;
            case INT :
                return ( double ) measure . getIntValue ( ) ;
            case LONG :
                return ( double ) measure . getLongValue ( ) ;
            case DOUBLE :
                return measure . getDoubleValue ( ) ;
            case NO_VALUE :
            case STRING :
            case LEVEL :
            default :
                // Non-numerical value types have no double representation.
                return null ;
        }
    }
}
public class ForkedJvm { /** * Adds the specified arguments to the command line for the * < code > main ( ) < / code > method . * @ param arguments * The arguments to add , may be < code > null < / code > . */ public void addArguments ( final String [ ] arguments ) { } }
if ( arguments != null ) { for ( final String argument : arguments ) { addArgument ( argument ) ; } }
public class LogUtils {
    /**
     * Checks log level and logs.
     *
     * @param logger    the Logger to log to
     * @param level     the severity level
     * @param message   the log message
     * @param parameter the parameter to substitute into message
     */
    public static void log(Logger logger, Level level, String message, Object parameter) {
        // Delegate to the varargs-array overload with a single-element array.
        final Object[] parameters = { parameter };
        log(logger, level, message, parameters);
    }
}
public class TaskTracker {
    /**
     * The task is no longer running. It may not have completed successfully
     *
     * @param taskid        the attempt that finished
     * @param commitPending whether the attempt still has a commit pending
     */
    void reportTaskFinished ( TaskAttemptID taskid , boolean commitPending ) {
        TaskInProgress tip ;
        // Hold the TaskTracker monitor only for the map lookup; the finish
        // callback below runs outside the lock.
        synchronized ( this ) {
            tip = tasks . get ( taskid ) ;
        }
        if ( tip != null ) {
            tip . reportTaskFinished ( commitPending ) ;
        } else {
            // Attempt not tracked here (e.g. already purged): log and drop.
            LOG . warn ( "Unknown child task finished: " + taskid + ". Ignored." ) ;
        }
    }
}
public class Ifc2x3tc1PackageImpl {
    /**
     * Returns the {@link EClass} for IfcRelDefinesByType, resolving it lazily
     * from the globally registered package on first access.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getIfcRelDefinesByType ( ) {
        if ( ifcRelDefinesByTypeEClass == null ) {
            // 472 is the generated classifier index for IfcRelDefinesByType
            // within the registered Ifc2x3tc1 package.
            ifcRelDefinesByTypeEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 472 ) ;
        }
        return ifcRelDefinesByTypeEClass ;
    }
}
public class snmpcommunity { /** * Use this API to fetch filtered set of snmpcommunity resources . * filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */ public static snmpcommunity [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
snmpcommunity obj = new snmpcommunity ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; snmpcommunity [ ] response = ( snmpcommunity [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class FindingReplacing {
    /**
     * Sets the substrings in given left tag and ending as the search string.
     *
     * @see Betner#before(String)
     * @param right the ending delimiter of the searched span
     * @return a positional builder whose result applies the 'B' (before) queue
     */
    public InclusMultiPos < S , String , String > before ( String right ) {
        // The anonymous subclass defers building the result until all of the
        // positional options (pos/position/inclusive/...) have been configured.
        return new InclusMultiPos < S , String , String > ( right , null ) {
            @ Override
            protected S result ( ) {
                return aQueue ( 'B' , left , right , pos , position , inclusive , plusminus , filltgt ) ;
            }
        } ;
    }
}
public class HttpSmartProxyHandler {
    /**
     * Automatic selection of the authentication algorithm. If <code>preferedOrder</code> is set
     * then algorithms are selected from the list order otherwise the algorithm tries to select
     * the most secured algorithm available first.
     *
     * @param response the proxy response
     * @throws ProxyAuthException if no usable authentication mechanism could be selected
     */
    private void autoSelectAuthHandler ( final HttpProxyResponse response ) throws ProxyAuthException {
        // Get the Proxy - Authenticate header
        List < String > values = response . getHeaders ( ) . get ( "Proxy-Authenticate" ) ;
        ProxyIoSession proxyIoSession = getProxyIoSession ( ) ;

        if ( values == null || values . size ( ) == 0 ) {
            // No challenge from the proxy: no authentication required.
            authHandler = HttpAuthenticationMethods . NO_AUTH . getNewHandler ( proxyIoSession ) ;
        } else if ( getProxyIoSession ( ) . getPreferedOrder ( ) == null ) {
            // No preference order set for auth mechanisms
            int method = - 1 ;

            // Test which auth mechanism to use . First found is the first used
            // that ' s why we test in a decreasing security quality order .
            for ( String proxyAuthHeader : values ) {
                proxyAuthHeader = proxyAuthHeader . toLowerCase ( ) ;

                if ( proxyAuthHeader . contains ( "ntlm" ) ) {
                    // NTLM is considered strongest here; stop searching.
                    method = HttpAuthenticationMethods . NTLM . getId ( ) ;
                    break ;
                } else if ( proxyAuthHeader . contains ( "digest" ) && method != HttpAuthenticationMethods . NTLM . getId ( ) ) {
                    method = HttpAuthenticationMethods . DIGEST . getId ( ) ;
                } else if ( proxyAuthHeader . contains ( "basic" ) && method == - 1 ) {
                    // Basic only when nothing better has been seen yet.
                    method = HttpAuthenticationMethods . BASIC . getId ( ) ;
                }
            }

            if ( method != - 1 ) {
                try {
                    authHandler = HttpAuthenticationMethods . getNewHandler ( method , proxyIoSession ) ;
                } catch ( Exception ex ) {
                    logger . debug ( "Following exception occured:" , ex ) ;
                }
            }

            // Handler creation failed or no known scheme offered: fall back to no-auth.
            if ( authHandler == null ) {
                authHandler = HttpAuthenticationMethods . NO_AUTH . getNewHandler ( proxyIoSession ) ;
            }
        } else {
            // Honour the user-supplied preference order: the first preferred
            // method the proxy actually offered wins.
            for ( HttpAuthenticationMethods method : proxyIoSession . getPreferedOrder ( ) ) {
                if ( authHandler != null ) {
                    break ;
                }

                if ( method == HttpAuthenticationMethods . NO_AUTH ) {
                    authHandler = HttpAuthenticationMethods . NO_AUTH . getNewHandler ( proxyIoSession ) ;
                    break ;
                }

                for ( String proxyAuthHeader : values ) {
                    proxyAuthHeader = proxyAuthHeader . toLowerCase ( ) ;

                    try {
                        // test which auth mechanism to use
                        if ( proxyAuthHeader . contains ( "basic" ) && method == HttpAuthenticationMethods . BASIC ) {
                            authHandler = HttpAuthenticationMethods . BASIC . getNewHandler ( proxyIoSession ) ;
                            break ;
                        } else if ( proxyAuthHeader . contains ( "digest" ) && method == HttpAuthenticationMethods . DIGEST ) {
                            authHandler = HttpAuthenticationMethods . DIGEST . getNewHandler ( proxyIoSession ) ;
                            break ;
                        } else if ( proxyAuthHeader . contains ( "ntlm" ) && method == HttpAuthenticationMethods . NTLM ) {
                            authHandler = HttpAuthenticationMethods . NTLM . getNewHandler ( proxyIoSession ) ;
                            break ;
                        }
                    } catch ( Exception ex ) {
                        logger . debug ( "Following exception occured:" , ex ) ;
                    }
                }
            }
        }

        // Nothing matched at all: the proxy demanded a mechanism we don't support.
        if ( authHandler == null ) {
            throw new ProxyAuthException ( "Unknown authentication mechanism(s): " + values ) ;
        }
    }
}
public class SegmentKelp { /** * Writes a page index . */ int writePageIndex ( byte [ ] buffer , int head , int type , int pid , int nextPid , int entryOffset , int entryLength ) { } }
int sublen = 1 + 4 * 4 ; if ( BLOCK_SIZE - 8 < head + sublen ) { return - 1 ; } buffer [ head ] = ( byte ) type ; head ++ ; BitsUtil . writeInt ( buffer , head , pid ) ; head += 4 ; BitsUtil . writeInt ( buffer , head , nextPid ) ; head += 4 ; BitsUtil . writeInt ( buffer , head , entryOffset ) ; head += 4 ; BitsUtil . writeInt ( buffer , head , entryLength ) ; head += 4 ; return head ;
public class ObligationPolicyDatabase {
    /**
     * Add an appropriate policy database entry for parameters marked with the
     * WillClose annotation.
     *
     * @param xmethod    a method
     * @param obligation the Obligation deleted by the method
     * @param entryType  type of entry (STRONG or WEAK)
     * @return the entry that was registered
     */
    public ObligationPolicyDatabaseEntry addParameterDeletesObligationDatabaseEntry(XMethod xmethod, Obligation obligation, ObligationPolicyDatabaseEntryType entryType) {
        // Record that invoking this method deletes one instance of the
        // obligation type, then hand the same entry back to the caller.
        final ObligationPolicyDatabaseEntry deletionEntry = new MatchMethodEntry(xmethod, ObligationPolicyDatabaseActionType.DEL, entryType, obligation);
        addEntry(deletionEntry);
        return deletionEntry;
    }
}