signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class DeviceLocation { /** * Get the last known location . * If one is not available , fetch * a fresh location * @ param context * @ param waitForGpsFix * @ param cb */ public static void getLastKnownLocation ( Context context , boolean waitForGpsFix , final LocationResult cb ) { } }
DeviceLocation deviceLocation = new DeviceLocation ( ) ; LocationManager lm = ( LocationManager ) context . getSystemService ( Context . LOCATION_SERVICE ) ; Location last_loc ; last_loc = lm . getLastKnownLocation ( LocationManager . GPS_PROVIDER ) ; if ( last_loc == null ) last_loc = lm . getLastKnownLocation ( LocationManager . NETWORK_PROVIDER ) ; if ( last_loc != null && cb != null ) { cb . gotLocation ( last_loc ) ; } else { deviceLocation . getLocation ( context , cb , waitForGpsFix ) ; }
public class Utils { /** * calculates a " load " , given on two deltas */ public static double calcLoad ( Long deltaCpuTime , long deltaUptime , long factor ) { } }
if ( deltaCpuTime == null || deltaCpuTime <= 0 || deltaUptime == 0 ) { return 0.0 ; } return deltaCpuTime * 100d / factor / deltaUptime ;
public class GreenPepperXMLAplicationContext { /** * { @ inheritDoc } */ protected Resource getResourceByPath ( String path ) { } }
String classPathPrefix = org . springframework . core . io . ResourceLoader . CLASSPATH_URL_PREFIX ; if ( path . startsWith ( classPathPrefix ) ) { return super . getResourceByPath ( path . substring ( classPathPrefix . length ( ) ) ) ; } if ( path != null && path . startsWith ( "/" ) ) { path = path . substring ( 1 ) ; } return new FileSystemResource ( path ) ;
public class DomXmlMessageValidator { /** * Validate message payloads by comparing to a control message . * @ param receivedMessage * @ param validationContext * @ param context */ protected void validateMessageContent ( Message receivedMessage , Message controlMessage , XmlMessageValidationContext validationContext , TestContext context ) { } }
if ( controlMessage == null || controlMessage . getPayload ( ) == null ) { log . debug ( "Skip message payload validation as no control message was defined" ) ; return ; } if ( ! ( controlMessage . getPayload ( ) instanceof String ) ) { throw new IllegalArgumentException ( "DomXmlMessageValidator does only support message payload of type String, " + "but was " + controlMessage . getPayload ( ) . getClass ( ) ) ; } String controlMessagePayload = controlMessage . getPayload ( String . class ) ; if ( receivedMessage . getPayload ( ) == null || ! StringUtils . hasText ( receivedMessage . getPayload ( String . class ) ) ) { Assert . isTrue ( ! StringUtils . hasText ( controlMessagePayload ) , "Unable to validate message payload - received message payload was empty, control message payload is not" ) ; return ; } else if ( ! StringUtils . hasText ( controlMessagePayload ) ) { return ; } log . debug ( "Start XML tree validation ..." ) ; Document received = XMLUtils . parseMessagePayload ( receivedMessage . getPayload ( String . class ) ) ; Document source = XMLUtils . parseMessagePayload ( controlMessagePayload ) ; XMLUtils . stripWhitespaceNodes ( received ) ; XMLUtils . stripWhitespaceNodes ( source ) ; if ( log . isDebugEnabled ( ) ) { log . debug ( "Received message:\n" + XMLUtils . serialize ( received ) ) ; log . debug ( "Control message:\n" + XMLUtils . serialize ( source ) ) ; } validateXmlTree ( received , source , validationContext , namespaceContextBuilder . buildContext ( receivedMessage , validationContext . getNamespaces ( ) ) , context ) ;
public class ContentInfoUtil { /** * Return the content type for the input - stream or null if none of the magic entries matched . You might want to use * the { @ link ContentInfoInputStreamWrapper } class to delegate to an input - stream and determine content information * at the same time . * < b > NOTE : < / b > The caller is responsible for closing the input - stream . * @ throws IOException * If there was a problem reading from the input - stream . * @ see ContentInfoInputStreamWrapper */ public ContentInfo findMatch ( InputStream inputStream ) throws IOException { } }
byte [ ] bytes = new byte [ fileReadSize ] ; int numRead = inputStream . read ( bytes ) ; if ( numRead < 0 ) { return null ; } if ( numRead < bytes . length ) { // move the bytes into a smaller array bytes = Arrays . copyOf ( bytes , numRead ) ; } return findMatch ( bytes ) ;
public class InitialRequestDispatcher { /** * Try to find a matching Sip Session to a given dialog * @ param dialog the dialog to find the session * @ return the matching session , null if not session have been found */ private MobicentsSipSession retrieveSipSession ( Dialog dialog ) { } }
if ( dialog != null ) { Iterator < SipContext > iterator = sipApplicationDispatcher . findSipApplications ( ) ; while ( iterator . hasNext ( ) ) { SipContext sipContext = iterator . next ( ) ; SipManager sipManager = sipContext . getSipManager ( ) ; Iterator < MobicentsSipSession > sipSessionsIt = sipManager . getAllSipSessions ( ) ; while ( sipSessionsIt . hasNext ( ) ) { MobicentsSipSession mobicentsSipSession = ( MobicentsSipSession ) sipSessionsIt . next ( ) ; MobicentsSipSessionKey sessionKey = mobicentsSipSession . getKey ( ) ; if ( sessionKey . getCallId ( ) . trim ( ) . equals ( dialog . getCallId ( ) . getCallId ( ) ) ) { if ( logger . isDebugEnabled ( ) ) { logger . debug ( "found session with the same Call Id " + sessionKey + ", to Tag " + sessionKey . getToTag ( ) ) ; logger . debug ( "dialog localParty = " + dialog . getLocalParty ( ) . getURI ( ) + ", localTag " + dialog . getLocalTag ( ) ) ; logger . debug ( "dialog remoteParty = " + dialog . getRemoteParty ( ) . getURI ( ) + ", remoteTag " + dialog . getRemoteTag ( ) ) ; } if ( sessionKey . getFromTag ( ) . equals ( dialog . getLocalTag ( ) ) && sessionKey . getToTag ( ) . equals ( dialog . getRemoteTag ( ) ) ) { if ( mobicentsSipSession . getProxy ( ) == null ) { return mobicentsSipSession ; } } else if ( sessionKey . getFromTag ( ) . equals ( dialog . getRemoteTag ( ) ) && sessionKey . getToTag ( ) . equals ( dialog . getLocalTag ( ) ) ) { if ( mobicentsSipSession . getProxy ( ) == null ) { return mobicentsSipSession ; } } } } } } return null ;
public class HttpServiceProxy { /** * { @ inheritDoc } */ @ Override public void setSessionTimeout ( final Integer minutes , final HttpContext httpContext ) { } }
LOG . debug ( "Setting session timeout to " + minutes + " minutes for http context [" + httpContext + "]" ) ; delegate . setSessionTimeout ( minutes , httpContext ) ;
public class LoadBalancerBuilder { /** * Build a load balancer using the configuration from the { @ link IClientConfig } only . It uses reflection to initialize necessary load balancer * components . */ public ILoadBalancer buildLoadBalancerFromConfigWithReflection ( ) { } }
String loadBalancerClassName = config . get ( CommonClientConfigKey . NFLoadBalancerClassName ) ; if ( loadBalancerClassName == null ) { throw new IllegalArgumentException ( "NFLoadBalancerClassName is not specified in the IClientConfig" ) ; } ILoadBalancer lb ; try { lb = ( ILoadBalancer ) factory . create ( loadBalancerClassName , config ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } return lb ;
public class license_file { /** * Use this API to fetch filtered set of license _ file resources . * filter string should be in JSON format . eg : " vm _ state : DOWN , name : [ a - z ] + " */ public static license_file [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
license_file obj = new license_file ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; license_file [ ] response = ( license_file [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class LinkClustering { /** * Returns the edge similarity matrix for the edges in the provided sparse * matrix . */ private Matrix getEdgeSimMatrix ( List < Edge > edgeList , SparseMatrix sm , boolean keepSimilarityMatrixInMemory ) { } }
return ( keepSimilarityMatrixInMemory ) ? calculateEdgeSimMatrix ( edgeList , sm ) : new LazySimilarityMatrix ( edgeList , sm ) ;
public class QBitValue { /** * { @ inheritDoc } */ @ Override public QBitValue appendSQL ( final SQLSelect _sql ) { } }
final IBitEnum [ ] consts = this . bitEnum . getClass ( ) . getEnumConstants ( ) ; final int max = consts [ consts . length - 1 ] . getInt ( ) * 2 ; boolean first = true ; for ( int i = 0 ; i < max ; i ++ ) { if ( ! this . added . contains ( i ) && BitEnumType . isSelected ( i , this . bitEnum ) ) { if ( first ) { first = false ; } else { _sql . addPart ( SQLPart . COMMA ) ; } _sql . addValuePart ( i ) ; } } return this ;
public class CacheContainerCommands { /** * An attribute write handler which performs special processing for ALIAS attributes . * @ param context the operation context * @ param operation the operation being executed * @ throws org . jboss . as . controller . OperationFailedException */ @ Override public void execute ( OperationContext context , ModelNode operation ) throws OperationFailedException { } }
if ( context . isNormalServer ( ) ) { ModelNode operationResult ; try { operationResult = invokeCommand ( getEmbeddedCacheManager ( context , operation ) , context , operation ) ; } catch ( Exception e ) { throw new OperationFailedException ( MESSAGES . failedToInvokeOperation ( e . getLocalizedMessage ( ) ) , e ) ; } if ( operationResult != null ) { context . getResult ( ) . set ( operationResult ) ; } }
public class XmlDoclet { /** * Check for doclet - added options . Returns the number of arguments you must * specify on the command line for the given option . For example , " - d docs " * would return 2. * This method is required if the doclet contains any options . If this * method is missing , Javadoc will print an invalid flag error for every * option . * @ see com . sun . javadoc . Doclet # optionLength ( String ) * @ param optionName * The name of the option . * @ return number of arguments on the command line for an option including * the option name itself . Zero return means option not known . * Negative value means error occurred . */ public static int optionLength ( String optionName ) { } }
Option option = options . getOption ( optionName ) ; if ( option == null ) { return 0 ; } return option . getArgs ( ) + 1 ;
public class VisOdomMonoPlaneInfinity { /** * Removes tracks which have not been included in the inlier set recently * @ return Number of dropped tracks */ private int dropUnusedTracks ( ) { } }
List < PointTrack > all = tracker . getAllTracks ( null ) ; int num = 0 ; for ( PointTrack t : all ) { VoTrack p = t . getCookie ( ) ; if ( tick - p . lastInlier > thresholdRetire ) { tracker . dropTrack ( t ) ; num ++ ; } } return num ;
public class ProjectableSQLQuery { /** * Add the given String literal as query flag * @ param position position * @ param flag query flag * @ return the current object */ @ Override public Q addFlag ( Position position , String flag ) { } }
return queryMixin . addFlag ( new QueryFlag ( position , flag ) ) ;
public class UrlRewriter { /** * Fixes all resources urls and returns the result . * @ param input * The html to be processed . * @ param requestUrl * The request URL . * @ param baseUrlParam * The base URL selected for this request . * @ param visibleBaseUrl * The base URL viewed by the browser . * @ param absolute * Should the rewritten urls contain the scheme host and port * @ return the result of this renderer . */ public CharSequence rewriteHtml ( CharSequence input , String requestUrl , String baseUrlParam , String visibleBaseUrl , boolean absolute ) { } }
StringBuffer result = new StringBuffer ( input . length ( ) ) ; Matcher m = URL_PATTERN . matcher ( input ) ; while ( m . find ( ) ) { String url = input . subSequence ( m . start ( 3 ) + 1 , m . end ( 3 ) - 1 ) . toString ( ) ; String tag = m . group ( 0 ) ; String quote = input . subSequence ( m . end ( 3 ) - 1 , m . end ( 3 ) ) . toString ( ) ; // Browsers tolerate urls with white spaces before or after String trimmedUrl = StringUtils . trim ( url ) ; String rewrittenUrl = url ; trimmedUrl = unescapeHtml ( trimmedUrl ) ; if ( trimmedUrl . isEmpty ( ) ) { LOG . debug ( "empty url kept unchanged" ) ; } else if ( trimmedUrl . startsWith ( "#" ) ) { LOG . debug ( "anchor url kept unchanged: [{}]" , url ) ; } else if ( JAVASCRIPT_CONCATENATION_PATTERN . matcher ( trimmedUrl ) . find ( ) ) { LOG . debug ( "url in javascript kept unchanged: [{}]" , url ) ; } else if ( m . group ( 2 ) . equalsIgnoreCase ( "content" ) ) { if ( META_REFRESH_PATTERN . matcher ( tag ) . find ( ) ) { rewrittenUrl = rewriteRefresh ( trimmedUrl , requestUrl , baseUrlParam , visibleBaseUrl ) ; rewrittenUrl = escapeHtml ( rewrittenUrl ) ; LOG . debug ( "refresh url [{}] rewritten [{}]" , url , rewrittenUrl ) ; } else { LOG . debug ( "content attribute kept unchanged: [{}]" , url ) ; } } else { rewrittenUrl = rewriteUrl ( trimmedUrl , requestUrl , baseUrlParam , visibleBaseUrl , absolute ) ; rewrittenUrl = escapeHtml ( rewrittenUrl ) ; LOG . debug ( "url [{}] rewritten [{}]" , url , rewrittenUrl ) ; } m . appendReplacement ( result , "" ) ; // Copy what is between the previous match and the current match result . append ( "<" ) ; result . append ( m . group ( 1 ) ) ; result . append ( m . group ( 2 ) ) ; result . append ( "=" ) ; result . append ( quote ) ; result . append ( rewrittenUrl ) ; result . append ( quote ) ; if ( m . groupCount ( ) > 3 ) { result . append ( m . group ( 4 ) ) ; } result . append ( ">" ) ; } m . appendTail ( result ) ; // Copy the reminder of the input return result ;
public class vpnvserver_rewritepolicy_binding { /** * Use this API to fetch vpnvserver _ rewritepolicy _ binding resources of given name . */ public static vpnvserver_rewritepolicy_binding [ ] get ( nitro_service service , String name ) throws Exception { } }
vpnvserver_rewritepolicy_binding obj = new vpnvserver_rewritepolicy_binding ( ) ; obj . set_name ( name ) ; vpnvserver_rewritepolicy_binding response [ ] = ( vpnvserver_rewritepolicy_binding [ ] ) obj . get_resources ( service ) ; return response ;
public class MqttDecoder { /** * Decodes the variable header ( if any ) * @ param buffer the buffer to decode from * @ param mqttFixedHeader MqttFixedHeader of the same message * @ return the variable header */ private static Result < ? > decodeVariableHeader ( ByteBuf buffer , MqttFixedHeader mqttFixedHeader ) { } }
switch ( mqttFixedHeader . messageType ( ) ) { case CONNECT : return decodeConnectionVariableHeader ( buffer ) ; case CONNACK : return decodeConnAckVariableHeader ( buffer ) ; case SUBSCRIBE : case UNSUBSCRIBE : case SUBACK : case UNSUBACK : case PUBACK : case PUBREC : case PUBCOMP : case PUBREL : return decodeMessageIdVariableHeader ( buffer ) ; case PUBLISH : return decodePublishVariableHeader ( buffer , mqttFixedHeader ) ; case PINGREQ : case PINGRESP : case DISCONNECT : // Empty variable header return new Result < Object > ( null , 0 ) ; } return new Result < Object > ( null , 0 ) ; // should never reach here
public class Image { /** * Gets a PDF Name from an array or returns the object that was passed . */ private PdfObject simplifyColorspace ( PdfArray obj ) { } }
if ( obj == null ) return obj ; PdfName first = obj . getAsName ( 0 ) ; if ( PdfName . CALGRAY . equals ( first ) ) return PdfName . DEVICEGRAY ; else if ( PdfName . CALRGB . equals ( first ) ) return PdfName . DEVICERGB ; else return obj ;
public class ErrorHandlingClient { /** * This is more advanced and does not make use of the stub . You should not normally need to do * this , but here is how you would . */ void advancedAsyncCall ( ) { } }
ClientCall < HelloRequest , HelloReply > call = channel . newCall ( GreeterGrpc . getSayHelloMethod ( ) , CallOptions . DEFAULT ) ; final CountDownLatch latch = new CountDownLatch ( 1 ) ; call . start ( new ClientCall . Listener < HelloReply > ( ) { @ Override public void onClose ( Status status , Metadata trailers ) { Verify . verify ( status . getCode ( ) == Status . Code . INTERNAL ) ; Verify . verify ( status . getDescription ( ) . contains ( "Narwhal" ) ) ; // Cause is not transmitted over the wire . latch . countDown ( ) ; } } , new Metadata ( ) ) ; call . sendMessage ( HelloRequest . newBuilder ( ) . setName ( "Marge" ) . build ( ) ) ; call . halfClose ( ) ; if ( ! Uninterruptibles . awaitUninterruptibly ( latch , 1 , TimeUnit . SECONDS ) ) { throw new RuntimeException ( "timeout!" ) ; }
public class Beacon { /** * Provides a calculated estimate of the distance to the beacon based on a running average of * the RSSI and the transmitted power calibration value included in the beacon advertisement . * This value is specific to the type of Android device receiving the transmission . * @ see # mDistance * @ return distance */ public double getDistance ( ) { } }
if ( mDistance == null ) { double bestRssiAvailable = mRssi ; if ( mRunningAverageRssi != null ) { bestRssiAvailable = mRunningAverageRssi ; } else { LogManager . d ( TAG , "Not using running average RSSI because it is null" ) ; } mDistance = calculateDistance ( mTxPower , bestRssiAvailable ) ; } return mDistance ;
public class Db { /** * Helper method to load a property file from class path . * @ param filesToLoad * an array of paths ( class path paths ) designating where the files may be . All files are loaded , in the order * given . Missing files are silently ignored . * @ return a Properties object , which may be empty but not null . */ public static Properties loadProperties ( String [ ] filesToLoad ) { } }
Properties p = new Properties ( ) ; InputStream fis = null ; for ( String path : filesToLoad ) { try { fis = Db . class . getClassLoader ( ) . getResourceAsStream ( path ) ; if ( fis != null ) { p . load ( fis ) ; jqmlogger . info ( "A jqm.properties file was found at {}" , path ) ; } } catch ( IOException e ) { // We allow no configuration files , but not an unreadable configuration file . throw new DatabaseException ( "META-INF/jqm.properties file is invalid" , e ) ; } finally { closeQuietly ( fis ) ; } } // Overload the datasource name from environment variable if any ( tests only ) . String dbName = System . getenv ( "DB" ) ; if ( dbName != null ) { p . put ( "com.enioka.jqm.jdbc.datasource" , "jdbc/" + dbName ) ; } // Done return p ;
public class ApiOvhHostingweb { /** * Get this object properties * REST : GET / hosting / web / { serviceName } / freedom / { domain } * @ param serviceName [ required ] The internal name of your hosting * @ param domain [ required ] Freedom domain */ public OvhFreedom serviceName_freedom_domain_GET ( String serviceName , String domain ) throws IOException { } }
String qPath = "/hosting/web/{serviceName}/freedom/{domain}" ; StringBuilder sb = path ( qPath , serviceName , domain ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhFreedom . class ) ;
public class CompilerUtil { /** * Marshals the specified argument to the declared type . May involve conversion . The following * argument conversions may occur : * < ul > * < li > List to Set if the argument type is a List ( i . e . NativeArray ) and the declared type is Set * < / li > * < li > Sting to Enum constant if the declared type is an Enum and the argument is a string which * matches one of the Enum constant names . < / li > * < / ul > * @ param declaredType * the type of the argument declared in the class definition * @ param value * the argument value provided in the config * @ return the possibly converted argument value , or null if the value cannot be converted to * the declared type */ private static Object marshalArg ( Class < ? > declaredType , Object value ) { } }
final String sourceMethod = "marshalArg" ; // $ NON - NLS - 1 $ final boolean isTraceLogging = log . isLoggable ( Level . FINER ) ; if ( isTraceLogging ) { log . entering ( sourceClass , sourceMethod , new Object [ ] { declaredType , value } ) ; } Object result = null ; Class < ? > argType = value . getClass ( ) ; if ( ! fromPrimitive ( declaredType ) . isAssignableFrom ( fromPrimitive ( argType ) ) ) { if ( declaredType . isAssignableFrom ( Set . class ) && value instanceof List ) { // Convert the list to a set Set < ? > set = new HashSet < Object > ( ( List < ? > ) value ) ; result = set ; } else if ( declaredType . isEnum ( ) && argType . equals ( String . class ) ) { // If the declared type is an enum and the provided type is a string , // then see if the string matches the name of an enum constant in the declared type . @ SuppressWarnings ( "unchecked" ) Class < ? extends Enum < ? > > enumType = ( Class < ? extends Enum < ? > > ) declaredType ; Object enumValue = getEnumNamesMap ( enumType ) . get ( value . toString ( ) ) ; if ( enumValue != null ) { result = enumValue ; } } else if ( declaredType . isAssignableFrom ( DiagnosticGroup . class ) && argType . equals ( String . class ) ) { // If the arg value matches a statically defined DiagnosticGroup in DiagnosticGroups , then // return that . result = findNamedDiagnosticGroup ( value . toString ( ) ) ; } } else { result = value ; } if ( isTraceLogging ) { log . exiting ( sourceMethod , sourceMethod , result ) ; } return result ;
public class SamlIdPMetadataDocument { /** * Gets encryption certificate decoded . * @ return the encryption certificate decoded */ @ JsonIgnore public String getEncryptionCertificateDecoded ( ) { } }
if ( EncodingUtils . isBase64 ( encryptionCertificate ) ) { return EncodingUtils . decodeBase64ToString ( encryptionCertificate ) ; } return encryptionCertificate ;
public class PaxWicketObjectOutputStream { /** * { @ inheritDoc } */ @ Override protected void writeObjectOverride ( final Object object ) throws IOException { } }
try { outputStream . writeObject ( object ) ; } catch ( NotSerializableException e ) { if ( CheckingObjectOutputStream . isAvailable ( ) ) { // trigger serialization again , but this time gather some more info new PaxWicketSerializableChecker ( e ) { @ Override protected boolean validateAdditionalSerializableConditions ( Object obj ) { return ! ( obj instanceof BundleContext ) && ! ( obj instanceof Bundle ) ; } @ Override protected Object additionalObjectReplacements ( Object streamObj ) { if ( streamObj instanceof BundleContext ) { BundleContext context = ( BundleContext ) streamObj ; streamObj = new ReplaceBundleContext ( context ) ; } else if ( streamObj instanceof Bundle ) { Bundle bundle = ( Bundle ) streamObj ; streamObj = new ReplaceBundle ( bundle ) ; } return streamObj ; } } . writeObject ( object ) ; // if we get here , we didn ' t fail , while we should ; throw e ; } throw e ; } catch ( RuntimeException e ) { LOGGER . error ( "error writing object " + object + ": " + e . getMessage ( ) , e ) ; throw e ; }
public class KeyManager { /** * Tick event called on the Client . < br > * Used to simulate pressing and releasing of our additional keys . < br > * This is about as close as we can ( easily ) get in the call stack to the point when Minecraft does the equivalent code for its own keys . * @ param ev ClientTickEvent for this tick . */ @ SubscribeEvent public void onClientTick ( TickEvent . ClientTickEvent ev ) { } }
if ( ev != null && ev . phase == Phase . START ) { for ( InternalKey binding : this . additionalKeys ) { if ( binding . isKeyDown ( ) ) { binding . onKeyDown ( ) ; } if ( binding . isPressed ( ) ) { binding . onPressed ( ) ; } } }
public class DBUtils { /** * returned by the query . In the case of an insert , returns null . */ public static List < List < Object > > runInsertSelect ( DBConnectionFactory connectionFactory , String sql , List < Object > sqlParams , boolean isWrite , int numRetries , int retryMaxInternalSec , boolean insert , boolean getGeneratedKeys ) throws IOException { } }
int waitMS = 3000 ; // wait for at least 3s before next retry . for ( int i = 0 ; i < numRetries ; ++ i ) { Connection conn = null ; ResultSet generatedKeys = null ; PreparedStatement pstmt = null ; String url = null ; try { try { url = connectionFactory . getUrl ( isWrite ) ; } catch ( IOException ioe ) { LOG . warn ( "Cannot get DB URL, fall back to the default one" , ioe ) ; url = defaultUrls . get ( isWrite ) ; if ( url == null ) { throw ioe ; } } LOG . info ( "Attepting connection with URL " + url ) ; conn = connectionFactory . getConnection ( url ) ; defaultUrls . put ( isWrite , url ) ; pstmt = getPreparedStatement ( conn , sql , sqlParams , getGeneratedKeys ) ; if ( insert ) { int recordsUpdated = pstmt . executeUpdate ( ) ; LOG . info ( "rows inserted: " + recordsUpdated + " sql: " + sql ) ; List < List < Object > > results = null ; if ( getGeneratedKeys ) { generatedKeys = pstmt . getGeneratedKeys ( ) ; results = getResults ( generatedKeys ) ; } Thread . sleep ( connectionFactory . getDBOpsSleepTime ( ) + rand . nextInt ( 1000 ) ) ; return results ; } else { generatedKeys = pstmt . executeQuery ( ) ; List < List < Object > > results = getResults ( generatedKeys ) ; pstmt . clearBatch ( ) ; LOG . info ( "rows selected: " + results . size ( ) + " sql: " + sql ) ; Thread . sleep ( connectionFactory . getDBOpsSleepTime ( ) + rand . nextInt ( 1000 ) ) ; return results ; } } catch ( Exception e ) { // We should catch a better exception than Exception , but since // DBConnectionUrlFactory . getUrl ( ) defines throws Exception , it ' s hard // for us to figure out the complete set it can throw . We follow // DBConnectionUrlFactory . getUrl ( ) ' s definition to catch Exception . // It shouldn ' t be a big problem as after numRetries , we anyway exit . LOG . info ( "Exception " + e + ". Will retry " + ( numRetries - i ) + " times." ) ; // Introducing a random factor to the wait time before another retry . 
// The wait time is dependent on # of failures and a random factor . // At the first time of getting a SQLException , the wait time // is a random number between [ 0,300 ] msec . If the first retry // still fails , we will wait 300 msec grace period before the 2nd retry . // Also at the second retry , the waiting window is expanded to 600 msec // alleviating the request rate from the server . Similarly the 3rd retry // will wait 600 msec grace period before retry and the waiting window // is // expanded to 1200 msec . waitMS += waitMS ; if ( waitMS > retryMaxInternalSec * 1000 ) { waitMS = retryMaxInternalSec * 1000 ; } double waitTime = waitMS + waitMS * rand . nextDouble ( ) ; if ( i + 1 == numRetries ) { LOG . error ( "Still got Exception after " + numRetries + " retries." , e ) ; throw new IOException ( e ) ; } try { Thread . sleep ( ( long ) waitTime ) ; } catch ( InterruptedException ie ) { throw new IOException ( ie ) ; } } finally { DBUtils . close ( generatedKeys , new PreparedStatement [ ] { pstmt } , conn ) ; } } return null ;
public class ConnectionMonitorsInner { /** * Query a snapshot of the most recent connection states . * @ param resourceGroupName The name of the resource group containing Network Watcher . * @ param networkWatcherName The name of the Network Watcher resource . * @ param connectionMonitorName The name given to the connection monitor . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws ErrorResponseException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the ConnectionMonitorQueryResultInner object if successful . */ public ConnectionMonitorQueryResultInner query ( String resourceGroupName , String networkWatcherName , String connectionMonitorName ) { } }
return queryWithServiceResponseAsync ( resourceGroupName , networkWatcherName , connectionMonitorName ) . toBlocking ( ) . last ( ) . body ( ) ;
public class ClientImpl { /** * Shutdown the client closing all network connections and release * all memory resources . * @ throws InterruptedException */ @ Override public void close ( ) throws InterruptedException { } }
if ( m_blessedThreadIds . contains ( Thread . currentThread ( ) . getId ( ) ) ) { throw new RuntimeException ( "Can't invoke backpressureBarrier from within the client callback thread " + " without deadlocking the client library" ) ; } m_isShutdown = true ; synchronized ( m_backpressureLock ) { m_backpressureLock . notifyAll ( ) ; } if ( m_reconnectStatusListener != null ) { m_distributer . removeClientStatusListener ( m_reconnectStatusListener ) ; m_reconnectStatusListener . close ( ) ; } if ( m_ex != null ) { m_ex . shutdown ( ) ; if ( CoreUtils . isJunitTest ( ) ) { m_ex . awaitTermination ( 1 , TimeUnit . SECONDS ) ; } else { m_ex . awaitTermination ( 365 , TimeUnit . DAYS ) ; } } m_distributer . shutdown ( ) ; ClientFactory . decreaseClientNum ( ) ;
public class FlowControllerFactory { /** * Get a FlowController instance , given a FlowController class . * @ param flowControllerClass the Class , which must be assignable to { @ link FlowController } . * @ return a new FlowController instance . */ public FlowController getFlowControllerInstance ( Class flowControllerClass ) throws InstantiationException , IllegalAccessException { } }
assert FlowController . class . isAssignableFrom ( flowControllerClass ) : "Class " + flowControllerClass . getName ( ) + " does not extend " + FlowController . class . getName ( ) ; return ( FlowController ) flowControllerClass . newInstance ( ) ;
public class Animation { /** * Add animation frame to the animation * @ param frame The image to display for the frame * @ param duration The duration to display the frame for */ public void addFrame ( Image frame , int duration ) { } }
if ( duration == 0 ) { Log . error ( "Invalid duration: " + duration ) ; throw new RuntimeException ( "Invalid duration: " + duration ) ; } if ( frames . isEmpty ( ) ) { nextChange = ( int ) ( duration / speed ) ; } frames . add ( new Frame ( frame , duration ) ) ; currentFrame = 0 ;
public class JmsConnectionImpl { /** * if the sessionMode is SESSION _ TRANSACTED , we create a transacted session * @ see javax . jms . Connection # createSession ( int ) */ @ Override public Session createSession ( int sessionMode ) throws JMSException { } }
Session session = null ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "createSession" , new Object [ ] { sessionMode } ) ; try { if ( sessionMode == JMSContext . SESSION_TRANSACTED ) session = createSession ( true , sessionMode ) ; else session = createSession ( false , sessionMode ) ; } finally { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "createSession" ) ; } return session ;
public class CompileTask { /** * Returns the most recent modified timestamp of the file collection . * Note : this must be combined into one method to account for both * Path and FileList erasure types . * @ param fileLists Collection of FileList or Path * @ return Most recent modified timestamp */ private long getLastModifiedTime ( List < ? > fileLists ) { } }
long lastModified = 0 ; for ( Object entry : fileLists ) { if ( entry instanceof FileList ) { FileList list = ( FileList ) entry ; for ( String fileName : list . getFiles ( this . getProject ( ) ) ) { File path = list . getDir ( this . getProject ( ) ) ; File file = new File ( path , fileName ) ; lastModified = Math . max ( getLastModifiedTime ( file ) , lastModified ) ; } } else if ( entry instanceof Path ) { Path path = ( Path ) entry ; for ( String src : path . list ( ) ) { File file = new File ( src ) ; lastModified = Math . max ( getLastModifiedTime ( file ) , lastModified ) ; } } } return lastModified ;
public class ApplicationMaster { /** * Checks if an Alluxio master node is already running * or not on the master address given . * @ return true if master exists , false otherwise */ private boolean masterExists ( ) { } }
String webPort = mAlluxioConf . get ( PropertyKey . MASTER_WEB_PORT ) ; try { URL myURL = new URL ( "http://" + mMasterAddress + ":" + webPort + Constants . REST_API_PREFIX + "/master/version" ) ; LOG . debug ( "Checking for master at: " + myURL . toString ( ) ) ; HttpURLConnection connection = ( HttpURLConnection ) myURL . openConnection ( ) ; connection . setRequestMethod ( HttpMethod . GET ) ; int resCode = connection . getResponseCode ( ) ; LOG . debug ( "Response code from master was: " + Integer . toString ( resCode ) ) ; connection . disconnect ( ) ; return resCode == HttpURLConnection . HTTP_OK ; } catch ( MalformedURLException e ) { LOG . error ( "Malformed URL in attempt to check if master is running already" , e ) ; } catch ( IOException e ) { LOG . debug ( "No existing master found" , e ) ; } return false ;
public class GSMXImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public boolean eIsSet ( int featureID ) { } }
switch ( featureID ) { case AfplibPackage . GSMX__MODE : return MODE_EDEFAULT == null ? mode != null : ! MODE_EDEFAULT . equals ( mode ) ; } return super . eIsSet ( featureID ) ;
public class RoaringBitmap { /** * Estimate of the memory usage of this data structure . This can be expected to be within 1 % of * the true memory usage in common usage scenarios . * If exact measures are needed , we recommend using dedicated libraries * such as ehcache - sizeofengine . * In adversarial cases , this estimate may be 10x the actual memory usage . For example , if * you insert a single random value in a bitmap , then over a 100 bytes may be used by the JVM * whereas this function may return an estimate of 32 bytes . * The same will be true in the " sparse " scenario where you have a small set of * random - looking integers spanning a wide range of values . * These are considered adversarial cases because , as a general rule , * if your data looks like a set * of random integers , Roaring bitmaps are probably not the right data structure . * Note that you can serialize your Roaring Bitmaps to disk and then construct * ImmutableRoaringBitmap instances from a ByteBuffer . In such cases , the Java heap * usage will be significantly less than * what is reported . * If your main goal is to compress arrays of integers , there are other libraries * that are maybe more appropriate * such as JavaFastPFOR . * Note , however , that in general , random integers ( as produced by random number * generators or hash functions ) are not compressible . * Trying to compress random data is an adversarial use case . * @ see < a href = " https : / / github . com / lemire / JavaFastPFOR " > JavaFastPFOR < / a > * @ return estimated memory usage . */ @ Override public long getLongSizeInBytes ( ) { } }
long size = 8 ; for ( int i = 0 ; i < this . highLowContainer . size ( ) ; i ++ ) { final Container c = this . highLowContainer . getContainerAtIndex ( i ) ; size += 2 + c . getSizeInBytes ( ) ; } return size ;
public class MasterPageStyle { /** * Return the master - style informations for this PageStyle . * @ param util a util for XML writing * @ param appendable where to write * @ throws IOException If an I / O error occurs */ public void appendXMLToMasterStyle ( final XMLUtil util , final Appendable appendable ) throws IOException { } }
appendable . append ( "<style:master-page" ) ; util . appendEAttribute ( appendable , "style:name" , this . name ) ; util . appendEAttribute ( appendable , "style:page-layout-name" , this . layoutName ) ; appendable . append ( "><style:header>" ) ; this . header . appendXMLToMasterStyle ( util , appendable ) ; appendable . append ( "</style:header>" ) ; appendable . append ( "<style:header-left" ) ; util . appendAttribute ( appendable , "style:display" , false ) ; appendable . append ( "/>" ) ; appendable . append ( "<style:footer>" ) ; this . footer . appendXMLToMasterStyle ( util , appendable ) ; appendable . append ( "</style:footer>" ) ; appendable . append ( "<style:footer-left" ) ; util . appendAttribute ( appendable , "style:display" , false ) ; appendable . append ( "/>" ) ; appendable . append ( "</style:master-page>" ) ;
public class JsGeometryMergeService { /** * Register a { @ link GeometryMergeRemovedHandler } to listen to events that signal a geometry has been removed from * the list for merging . * @ param handler * The { @ link GeometryMergeRemovedHandler } to add as listener . * @ return The registration of the handler . */ public JsHandlerRegistration addGeometryMergeRemovedHandler ( final GeometryMergeRemovedHandler handler ) { } }
org . geomajas . plugin . editing . client . merge . event . GeometryMergeRemovedHandler h ; h = new org . geomajas . plugin . editing . client . merge . event . GeometryMergeRemovedHandler ( ) { public void onGeometryMergingRemoved ( GeometryMergeRemovedEvent event ) { org . geomajas . plugin . editing . jsapi . client . merge . event . GeometryMergeRemovedEvent e ; e = new org . geomajas . plugin . editing . jsapi . client . merge . event . GeometryMergeRemovedEvent ( event . getGeometry ( ) ) ; handler . onGeometryMergeRemoved ( e ) ; } } ; HandlerRegistration registration = delegate . addGeometryMergeRemovedHandler ( h ) ; return new JsHandlerRegistration ( new HandlerRegistration [ ] { registration } ) ;
public class AWSDirectoryServiceClient { /** * Adds or removes domain controllers to or from the directory . Based on the difference between current value and * new value ( provided through this API call ) , domain controllers will be added or removed . It may take up to 45 * minutes for any new domain controllers to become fully active once the requested number of domain controllers is * updated . During this time , you cannot make another update request . * @ param updateNumberOfDomainControllersRequest * @ return Result of the UpdateNumberOfDomainControllers operation returned by the service . * @ throws EntityDoesNotExistException * The specified entity could not be found . * @ throws DirectoryUnavailableException * The specified directory is unavailable or could not be found . * @ throws DomainControllerLimitExceededException * The maximum allowed number of domain controllers per directory was exceeded . The default limit per * directory is 20 domain controllers . * @ throws InvalidParameterException * One or more parameters are not valid . * @ throws UnsupportedOperationException * The operation is not supported . * @ throws ClientException * A client exception has occurred . * @ throws ServiceException * An exception has occurred in AWS Directory Service . * @ sample AWSDirectoryService . UpdateNumberOfDomainControllers * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ds - 2015-04-16 / UpdateNumberOfDomainControllers " * target = " _ top " > AWS API Documentation < / a > */ @ Override public UpdateNumberOfDomainControllersResult updateNumberOfDomainControllers ( UpdateNumberOfDomainControllersRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeUpdateNumberOfDomainControllers ( request ) ;
public class Cluster { /** * Cluster operations that are waiting to be started . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setPendingActions ( java . util . Collection ) } or { @ link # withPendingActions ( java . util . Collection ) } if you want * to override the existing values . * @ param pendingActions * Cluster operations that are waiting to be started . * @ return Returns a reference to this object so that method calls can be chained together . */ public Cluster withPendingActions ( String ... pendingActions ) { } }
if ( this . pendingActions == null ) { setPendingActions ( new com . amazonaws . internal . SdkInternalList < String > ( pendingActions . length ) ) ; } for ( String ele : pendingActions ) { this . pendingActions . add ( ele ) ; } return this ;
public class MetadataFinder {
    /**
     * Start finding track metadata for all active players. Starts the {@link VirtualCdj} if it is not already
     * running, because we need it to send us device status updates to notice when new tracks are loaded; this
     * starts the {@link DeviceFinder} (which is also needed by the {@code VirtualCdj}) so we can keep track of
     * the comings and goings of players themselves. We start the {@link ConnectionManager} in order to make
     * queries to obtain metadata.
     *
     * <p>The startup order below is deliberate: each component's lifecycle/announcement listener is registered
     * before (or immediately around) starting that component, so no early event is missed.</p>
     *
     * @throws Exception if there is a problem starting the required components
     */
    public synchronized void start() throws Exception {
        if (!isRunning()) {
            // Register our lifecycle listener before starting the ConnectionManager.
            ConnectionManager.getInstance().addLifecycleListener(lifecycleListener);
            ConnectionManager.getInstance().start();
            // Start device discovery, then subscribe to device announcements.
            DeviceFinder.getInstance().start();
            DeviceFinder.getInstance().addDeviceAnnouncementListener(announcementListener);
            // Bring up the VirtualCdj and subscribe to its status updates.
            VirtualCdj.getInstance().addLifecycleListener(lifecycleListener);
            VirtualCdj.getInstance().start();
            VirtualCdj.getInstance().addUpdateListener(updateListener);
            // Background thread that drains queued CDJ status updates until shutdown.
            queueHandler = new Thread(new Runnable() {
                @Override
                public void run() {
                    while (isRunning()) {
                        try {
                            // Blocks until an update is available.
                            handleUpdate(pendingUpdates.take());
                        } catch (InterruptedException e) {
                            logger.debug("Interrupted, presumably due to MetadataFinder shutdown.", e);
                        } catch (Exception e) {
                            // Keep the loop alive even if one update fails.
                            logger.error("Problem handling CDJ status update.", e);
                        }
                    }
                }
            });
            // Mark running before starting the thread so its loop condition holds.
            running.set(true);
            queueHandler.start();
            deliverLifecycleAnnouncement(logger, true);
            // If there are already any rekordbox instances on the network, "mount" their collections.
            for (DeviceAnnouncement existingDevice : DeviceFinder.getInstance().getCurrentDevices()) {
                announcementListener.deviceFound(existingDevice);
            }
        }
    }
}
public class GoogleTransporter {
    /**
     * --- SUBSCRIBE ---
     *
     * <p>Ensures the Pub/Sub topic and a node-specific subscription exist for the
     * given channel, then registers (once per subscription) an asynchronous
     * {@code Subscriber} that forwards received payloads to
     * {@code processReceivedMessage} and acknowledges each message.</p>
     *
     * @param channel logical channel name to subscribe to
     * @return a resolved Promise on success, or a rejected Promise carrying the failure cause
     */
    @Override
    public Promise subscribe(String channel) {
        try {
            // Create topic (idempotent: look it up first, create only if missing).
            ProjectTopicName topicName = ProjectTopicName.of(projectID, channel);
            Topic topic = null;
            try {
                topic = topicAdmin.getTopic(topicName);
            } catch (NotFoundException notFound) {
                // Topic does not exist yet; created below.
            }
            if (topic == null) {
                topic = topicAdmin.createTopic(topicName);
                logger.info("Topic \"" + topic.getName() + "\" created successfully.");
            }
            // Create subscription. Channels already suffixed with this node's ID are
            // used as-is; otherwise the node ID is appended to make it node-specific.
            String nodeSubscription;
            if (channel.endsWith('.' + nodeID)) {
                nodeSubscription = channel;
            } else {
                nodeSubscription = channel + '-' + nodeID;
            }
            ProjectSubscriptionName subscriptionName = ProjectSubscriptionName.of(projectID, nodeSubscription);
            Subscription subscription = null;
            try {
                subscription = subscriptionAdmin.getSubscription(subscriptionName);
            } catch (NotFoundException notFound) {
                // Subscription does not exist yet; created below.
            }
            if (subscription == null) {
                subscription = subscriptionAdmin.createSubscription(subscriptionName, topicName,
                        PushConfig.getDefaultInstance(), ackDeadlineSeconds);
                logger.info("Subscription \"" + subscription.getName() + "\" created successfully.");
            }
            // Create subscriber (at most one per node subscription).
            synchronized (subscribers) {
                if (!subscribers.containsKey(nodeSubscription)) {
                    Subscriber.Builder builder = Subscriber.newBuilder(subscriptionName, (message, consumer) -> {
                        // Message received.
                        try {
                            // We are running in a netty executor's pool,
                            // do not create new task.
                            processReceivedMessage(channel, message.getData().toByteArray());
                        } finally {
                            // Always acknowledge, even if processing failed.
                            consumer.ack();
                        }
                    });
                    // Apply optional overrides only when configured.
                    if (channelProvider != null) {
                        builder.setChannelProvider(channelProvider);
                    }
                    if (credentialsProvider != null) {
                        builder.setCredentialsProvider(credentialsProvider);
                    }
                    if (executorProvider != null) {
                        builder.setExecutorProvider(executorProvider);
                        builder.setSystemExecutorProvider(executorProvider);
                    } else {
                        builder.setExecutorProvider(defaultExecutorProvider);
                        builder.setSystemExecutorProvider(defaultExecutorProvider);
                    }
                    if (headerProvider != null) {
                        builder.setHeaderProvider(headerProvider);
                    }
                    if (maxAckExtensionPeriod != null) {
                        builder.setMaxAckExtensionPeriod(maxAckExtensionPeriod);
                    }
                    if (parallelPullCount > 0) {
                        builder.setParallelPullCount(parallelPullCount);
                    }
                    if (flowControlSettings != null) {
                        builder.setFlowControlSettings(flowControlSettings);
                    }
                    Subscriber subscriber = builder.build();
                    subscriber.startAsync();
                    subscribers.put(nodeSubscription, subscriber);
                    logger.info("Subscriber created for subscription \""
                            + subscriber.getSubscriptionNameString() + "\".");
                }
            }
        } catch (Exception cause) {
            return Promise.reject(cause);
        }
        return Promise.resolve();
    }
}
public class DoubleRangeValidator { /** * - - - - - StateHolder Methods */ public Object saveState ( FacesContext context ) { } }
if ( context == null ) { throw new NullPointerException ( ) ; } if ( ! initialStateMarked ( ) ) { Object values [ ] = new Object [ 2 ] ; values [ 0 ] = maximum ; values [ 1 ] = minimum ; return ( values ) ; } return null ;
public class Timestamp {
    /**
     * Applies the local offset from UTC to each of the applicable time field
     * values and returns the new Timestamp. In short, this makes the Timestamp
     * represent local time.
     *
     * @return a new Timestamp in its local time
     */
    private Timestamp make_localtime() {
        // Treat a missing offset as UTC (zero minutes).
        int offset = _offset != null ? _offset.intValue() : 0;
        // We use a Copy-Constructor that expects the time parameters to be in
        // UTC, as that's what we're supposed to have.
        // As this Copy-Constructor doesn't apply local offset to the time
        // field values (it assumes that the local offset is already applied to
        // them), we explicitly apply the local offset to the time field values
        // after we obtain the new Timestamp instance.
        Timestamp localtime = new Timestamp(_precision, _year, _month, _day,
                _hour, _minute, _second, _fraction, _offset, APPLY_OFFSET_NO);
        // Explicitly apply the local offset to the time field values.
        // (Negated because apply_offset shifts toward UTC; -offset shifts toward local.)
        localtime.apply_offset(-offset);
        // The offset field itself is preserved; only the field values moved.
        assert localtime._offset == _offset;
        return localtime;
    }
}
public class SrvShoppingCart { /** * < p > Reveal shared tax rules for cart . It also makes buyer - regCustomer . < / p > * @ param pRqVs request scoped vars * @ param pCart cart * @ param pAs Accounting Settings * @ return tax rules , NULL if not taxable * @ throws Exception - an exception . */ @ Override public final TaxDestination revealTaxRules ( final Map < String , Object > pRqVs , final Cart pCart , final AccSettings pAs ) throws Exception { } }
if ( pCart . getBuyer ( ) . getRegCustomer ( ) == null ) { // copy buyer info into non - persistable customer . pCart . getBuyer ( ) . setRegCustomer ( new DebtorCreditor ( ) ) ; pCart . getBuyer ( ) . getRegCustomer ( ) . setIsForeigner ( pCart . getBuyer ( ) . getForeig ( ) ) ; pCart . getBuyer ( ) . getRegCustomer ( ) . setRegZip ( pCart . getBuyer ( ) . getRegZip ( ) ) ; pCart . getBuyer ( ) . getRegCustomer ( ) . setTaxDestination ( pCart . getBuyer ( ) . getTaxDest ( ) ) ; } TaxDestination txRules = null ; if ( pAs . getIsExtractSalesTaxFromSales ( ) && ! pCart . getBuyer ( ) . getRegCustomer ( ) . getIsForeigner ( ) ) { if ( pCart . getBuyer ( ) . getRegCustomer ( ) . getTaxDestination ( ) != null ) { // override tax method : txRules = pCart . getBuyer ( ) . getRegCustomer ( ) . getTaxDestination ( ) ; } else { txRules = new TaxDestination ( ) ; txRules . setSalTaxIsInvoiceBase ( pAs . getSalTaxIsInvoiceBase ( ) ) ; txRules . setSalTaxUseAggregItBas ( pAs . getSalTaxUseAggregItBas ( ) ) ; txRules . setSalTaxRoundMode ( pAs . getSalTaxRoundMode ( ) ) ; } } return txRules ;
public class LdapConsentRepository { /** * Merges a new decision into existing decisions . * Decisions are matched by ID . * @ param ldapConsent existing consent decisions * @ param decision new decision * @ return new decision set */ private static Set < String > mergeDecision ( final LdapAttribute ldapConsent , final ConsentDecision decision ) { } }
if ( decision . getId ( ) < 0 ) { decision . setId ( System . currentTimeMillis ( ) ) ; } if ( ldapConsent != null ) { val result = removeDecision ( ldapConsent , decision . getId ( ) ) ; val json = mapToJson ( decision ) ; if ( StringUtils . isBlank ( json ) ) { throw new IllegalArgumentException ( "Could not map consent decision to JSON" ) ; } result . add ( json ) ; LOGGER . debug ( "Merged consent decision [{}] with LDAP attribute [{}]" , decision , ldapConsent . getName ( ) ) ; return CollectionUtils . wrap ( result ) ; } val result = new HashSet < String > ( ) ; val json = mapToJson ( decision ) ; if ( StringUtils . isBlank ( json ) ) { throw new IllegalArgumentException ( "Could not map consent decision to JSON" ) ; } result . add ( json ) ; return result ;
public class Util {
    /**
     * Compare two possibly null objects; null sorts before any non-null value.
     *
     * @param thisone left operand, may be null
     * @param thatone right operand, may be null
     * @return -1, 0, or 1 (or the raw compareTo result when both are non-null)
     */
    @SuppressWarnings("unchecked")
    public static int cmpObjval(final Comparable thisone, final Comparable thatone) {
        if (thisone == null || thatone == null) {
            // Both null compare equal; a single null sorts first.
            if (thisone == thatone) {
                return 0;
            }
            return thisone == null ? -1 : 1;
        }
        return thisone.compareTo(thatone);
    }
}
public class ChannelHelper { /** * Copy all content from the source channel to the destination channel . * @ param aSrc * Source channel . May not be < code > null < / code > . Is not closed after * the operation . * @ param aDest * Destination channel . May not be < code > null < / code > . Is not closed * after the operation . * @ return The number of bytes written . * @ throws IOException * In case of IO error */ @ Nonnegative public static long channelCopy ( @ Nonnull @ WillNotClose final ReadableByteChannel aSrc , @ Nonnull @ WillNotClose final WritableByteChannel aDest ) throws IOException { } }
ValueEnforcer . notNull ( aSrc , "SourceChannel" ) ; ValueEnforcer . isTrue ( aSrc . isOpen ( ) , "SourceChannel is not open!" ) ; ValueEnforcer . notNull ( aDest , "DestinationChannel" ) ; ValueEnforcer . isTrue ( aDest . isOpen ( ) , "DestinationChannel is not open!" ) ; long nBytesWritten ; if ( USE_COPY_V1 ) nBytesWritten = _channelCopy1 ( aSrc , aDest ) ; else nBytesWritten = _channelCopy2 ( aSrc , aDest ) ; return nBytesWritten ;
public class CommerceCountryPersistenceImpl { /** * Returns a range of all the commerce countries where groupId = & # 63 ; and shippingAllowed = & # 63 ; and active = & # 63 ; . * Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CommerceCountryModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order . * @ param groupId the group ID * @ param shippingAllowed the shipping allowed * @ param active the active * @ param start the lower bound of the range of commerce countries * @ param end the upper bound of the range of commerce countries ( not inclusive ) * @ return the range of matching commerce countries */ @ Override public List < CommerceCountry > findByG_S_A ( long groupId , boolean shippingAllowed , boolean active , int start , int end ) { } }
return findByG_S_A ( groupId , shippingAllowed , active , start , end , null ) ;
public class JRDF { /** * Tells whether the given objects are equivalent , with one given as a set * of simple values . * @ param o1 * first node . * @ param o2 * second node URI ( if isLiteral is false ) or lexical value ( if * isLiteral is true ) * @ param isLiteral * whether the second node is a literal . * @ param type * second literal ' s datatype URI string , if applicable . * @ param lang * second literal ' s language tag string , if applicable . * @ return true if equivalent , false otherwise . */ public static boolean sameObject ( ObjectNode o1 , String o2 , boolean isLiteral , URI type , String lang ) { } }
if ( o1 instanceof URIReference ) { return sameResource ( ( URIReference ) o1 , o2 ) ; } else if ( o1 instanceof Literal || isLiteral ) { return sameLiteral ( ( Literal ) o1 , o2 , type , lang ) ; } else { return false ; }
public class DOMUtils { /** * Create a org . w3c . dom . Text node . * This uses the document builder associated with the current thread . */ public static Text createTextNode ( String value ) { } }
Document doc = getOwnerDocument ( ) ; return doc . createTextNode ( value ) ;
public class ODataRendererUtils { /** * Checks if we are trying to force expand all Nav properties for function calls by looking at expand parameter . * @ param oDataUri The OData URI * @ return boolean if force expand parameter is set */ public static boolean isForceExpandParamSet ( ODataUri oDataUri ) { } }
if ( isFunctionCallUri ( oDataUri ) ) { // Check if we have expand param set to true Option < scala . collection . immutable . Map < String , String > > params = getFunctionCallParameters ( oDataUri ) ; if ( params . isDefined ( ) && ! params . get ( ) . isEmpty ( ) ) { Map < String , String > parametersMap = JavaConverters . mapAsJavaMap ( params . get ( ) ) ; if ( parametersMap . containsKey ( FORCE_EXPAND_PARAM ) ) { return Boolean . parseBoolean ( parametersMap . get ( FORCE_EXPAND_PARAM ) ) ; } } } return false ;
public class Jenkins { /** * Sign up for the user account . */ public void doSignup ( StaplerRequest req , StaplerResponse rsp ) throws IOException , ServletException { } }
if ( getSecurityRealm ( ) . allowsSignup ( ) ) { req . getView ( getSecurityRealm ( ) , "signup.jelly" ) . forward ( req , rsp ) ; return ; } req . getView ( SecurityRealm . class , "signup.jelly" ) . forward ( req , rsp ) ;
public class ZonalDateTime { /** * / * [ deutsch ] * < p > Interpretiert den angegebenen Text als { @ code ZonalDateTime } . < / p > * < p > Hinweis : Diese Methode kann in Lambda - Ausdr & uuml ; cken verwendet werden , weil sie < i > checked exceptions < / i > * vermeidet . < / p > * @ param text text to be parsed * @ param parser helps to parse given text * @ return parsed result * @ throws IndexOutOfBoundsException if the text is empty * @ throws ChronoException if parsing does not work ( for example missing timezone information ) * @ since 5.0 */ public static ZonalDateTime parse ( String text , TemporalFormatter < Moment > parser ) { } }
try { RawValues rawValues = new RawValues ( ) ; Moment moment = parser . parse ( text , rawValues ) ; Timezone tz ; if ( rawValues . get ( ) . hasTimezone ( ) ) { tz = toTimezone ( rawValues . get ( ) . getTimezone ( ) , text ) ; } else if ( parser . getAttributes ( ) . contains ( TIMEZONE_ID ) ) { tz = toTimezone ( parser . getAttributes ( ) . get ( TIMEZONE_ID ) , text ) ; } else { throw new ChronoException ( "Missing timezone: " + text ) ; } return ZonalDateTime . of ( moment , tz ) ; } catch ( ParseException pe ) { throw new ChronoException ( pe . getMessage ( ) , pe ) ; }
public class DeleteUserRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeleteUserRequest deleteUserRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( deleteUserRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteUserRequest . getUserName ( ) , USERNAME_BINDING ) ; protocolMarshaller . marshall ( deleteUserRequest . getAwsAccountId ( ) , AWSACCOUNTID_BINDING ) ; protocolMarshaller . marshall ( deleteUserRequest . getNamespace ( ) , NAMESPACE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class BDAFactory { /** * only for extensions */ public static WebSphereBeanDeploymentArchive createBDA ( WebSphereCDIDeployment deployment , ExtensionArchive extensionArchive , CDIRuntime cdiRuntime ) throws CDIException { } }
Set < String > additionalClasses = extensionArchive . getExtraClasses ( ) ; Set < String > additionalAnnotations = extensionArchive . getExtraBeanDefiningAnnotations ( ) ; boolean extensionCanSeeApplicationBDAs = extensionArchive . applicationBDAsVisible ( ) ; boolean extClassesOnlyBDA = extensionArchive . isExtClassesOnly ( ) ; String archiveID = deployment . getDeploymentID ( ) + "#" + extensionArchive . getName ( ) + ".additionalClasses" ; // this isn ' t really part of any EE module so we just use the archiveID which should be unique EEModuleDescriptor eeModuleDescriptor = new WebSphereEEModuleDescriptor ( archiveID , extensionArchive . getType ( ) ) ; WebSphereBeanDeploymentArchive bda = createBDA ( deployment , archiveID , extensionArchive , cdiRuntime , additionalClasses , additionalAnnotations , extensionCanSeeApplicationBDAs , extClassesOnlyBDA , eeModuleDescriptor ) ; return bda ;
public class FaviconBehavior { /** * { @ inheritDoc } */ @ Override public void renderHead ( final Component component , final IHeaderResponse response ) { } }
super . renderHead ( component , response ) ; response . render ( new StringHeaderItem ( "<link type=\"image/x-icon\" rel=\"shortcut icon\" href=\"favicon.ico\" />" ) ) ;
public class BackupClientImpl { /** * { @ inheritDoc } */ public String startBackUp ( String repositoryName , String workspaceName , String backupDir ) throws IOException , BackupExecuteException { } }
if ( workspaceName != null ) { String sURL = path + HTTPBackupAgent . Constants . BASE_URL + HTTPBackupAgent . Constants . OperationType . START_BACKUP + "/" + repositoryName + "/" + workspaceName ; BackupConfigBean bean = new BackupConfigBean ( BackupManager . FULL_BACKUP_ONLY , backupDir ) ; JsonGeneratorImpl generatorImpl = new JsonGeneratorImpl ( ) ; JsonValue json ; try { json = generatorImpl . createJsonObject ( bean ) ; } catch ( JsonException e ) { throw new BackupExecuteException ( "Can not get json from : " + bean . getClass ( ) . toString ( ) , e ) ; } BackupAgentResponse response = transport . executePOST ( sURL , json . toString ( ) ) ; if ( response . getStatus ( ) == Response . Status . OK . getStatusCode ( ) ) { return "\nSuccessful : \n" + "\tstatus code = " + response . getStatus ( ) + "\n" ; } else { return failureProcessing ( response ) ; } } else { String sURL = path + HTTPBackupAgent . Constants . BASE_URL + HTTPBackupAgent . Constants . OperationType . START_BACKUP_REPOSITORY + "/" + repositoryName ; BackupConfigBean bean = new BackupConfigBean ( BackupManager . FULL_BACKUP_ONLY , backupDir ) ; JsonGeneratorImpl generatorImpl = new JsonGeneratorImpl ( ) ; JsonValue json ; try { json = generatorImpl . createJsonObject ( bean ) ; } catch ( JsonException e ) { throw new BackupExecuteException ( "Can not get json from : " + bean . getClass ( ) . toString ( ) , e ) ; } BackupAgentResponse response = transport . executePOST ( sURL , json . toString ( ) ) ; if ( response . getStatus ( ) == Response . Status . OK . getStatusCode ( ) ) { return "\nSuccessful : \n" + "\tstatus code = " + response . getStatus ( ) + "\n" ; } else { return failureProcessing ( response ) ; } }
public class EnvironmentConfig { /** * Sets percent of minimum database utilization . Default value is { @ code 50 } . That means that 50 percent * of free space in raw data in { @ code Log } files ( . xd files ) is allowed . If database utilization is less than * defined ( free space percent is more than { @ code 50 } ) , the database garbage collector is triggered . * < p > Mutable at runtime : yes * @ param percent percent of minimum database utilization * @ return this { @ code EnvironmentConfig } instance * @ throws InvalidSettingException { @ code percent } is not in the range [ 1 . . 90] */ public EnvironmentConfig setGcMinUtilization ( int percent ) throws InvalidSettingException { } }
if ( percent < 1 || percent > 90 ) { throw new InvalidSettingException ( "Invalid minimum log files utilization: " + percent ) ; } return setSetting ( GC_MIN_UTILIZATION , percent ) ;
public class CredHubInterpolationServiceDataPostProcessor { /** * Convert from the Spring Cloud Connectors service data structure to the Spring * Credhub data structure . * @ param rawServiceData the Spring Cloud Connectors data structure * @ return the equivalent Spring CredHub data structure */ private ServicesData connectorsToCredHub ( CloudFoundryRawServiceData rawServiceData ) { } }
ServicesData servicesData = new ServicesData ( ) ; servicesData . putAll ( rawServiceData ) ; return servicesData ;
public class RebindConfiguration { /** * @ param clazz class to find the type * @ return the { @ link JType } denoted by the class given in parameter */ private JType findType ( Class < ? > clazz ) { } }
if ( clazz . isPrimitive ( ) ) { return JPrimitiveType . parse ( clazz . getCanonicalName ( ) ) ; } else if ( clazz . isArray ( ) ) { try { return context . getTypeOracle ( ) . parse ( clazz . getCanonicalName ( ) ) ; } catch ( TypeOracleException e ) { logger . log ( TreeLogger . WARN , "Cannot find the array denoted by the class " + clazz . getCanonicalName ( ) ) ; return null ; } } else { return findClassType ( clazz ) ; }
public class FilesImpl {
    /**
     * Lists the files in a task's directory on its compute node, transparently
     * following service pagination: each page is emitted, and when a page carries
     * a next-page link the follow-up request is concatenated onto the stream.
     *
     * @param jobId                   The ID of the job that contains the task.
     * @param taskId                  The ID of the task whose files you want to list.
     * @param recursive               Whether to list children of the task directory. This parameter can be
     *                                used in combination with the filter parameter to list specific type of files.
     * @param fileListFromTaskOptions Additional parameters for the operation
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;NodeFile&gt; object
     */
    public Observable<ServiceResponseWithHeaders<Page<NodeFile>, FileListFromTaskHeaders>> listFromTaskWithServiceResponseAsync(
            final String jobId, final String taskId, final Boolean recursive,
            final FileListFromTaskOptions fileListFromTaskOptions) {
        return listFromTaskSinglePageAsync(jobId, taskId, recursive, fileListFromTaskOptions)
                .concatMap(new Func1<ServiceResponseWithHeaders<Page<NodeFile>, FileListFromTaskHeaders>, Observable<ServiceResponseWithHeaders<Page<NodeFile>, FileListFromTaskHeaders>>>() {
                    @Override
                    public Observable<ServiceResponseWithHeaders<Page<NodeFile>, FileListFromTaskHeaders>> call(
                            ServiceResponseWithHeaders<Page<NodeFile>, FileListFromTaskHeaders> page) {
                        String nextPageLink = page.body().nextPageLink();
                        if (nextPageLink == null) {
                            // Last page: nothing more to fetch.
                            return Observable.just(page);
                        }
                        // Carry the caller's request options forward to the next-page call.
                        FileListFromTaskNextOptions fileListFromTaskNextOptions = null;
                        if (fileListFromTaskOptions != null) {
                            fileListFromTaskNextOptions = new FileListFromTaskNextOptions();
                            fileListFromTaskNextOptions.withClientRequestId(fileListFromTaskOptions.clientRequestId());
                            fileListFromTaskNextOptions.withReturnClientRequestId(fileListFromTaskOptions.returnClientRequestId());
                            fileListFromTaskNextOptions.withOcpDate(fileListFromTaskOptions.ocpDate());
                        }
                        // Emit this page, then recursively fetch and append the rest.
                        return Observable.just(page)
                                .concatWith(listFromTaskNextWithServiceResponseAsync(nextPageLink, fileListFromTaskNextOptions));
                    }
                });
    }
}
public class LongTuples { /** * Set all elements of the given tuple to the given value * @ param t The tuple * @ param v The value */ public static void set ( MutableLongTuple t , long v ) { } }
for ( int i = 0 ; i < t . getSize ( ) ; i ++ ) { t . set ( i , v ) ; }
public class AbstractSelectCodeGenerator {
    /**
     * Generates the select method: extracts the JQL projections, emits the common
     * query-preparation code, then delegates the result-handling code to the
     * subclass-specific part.
     *
     * <p>NOTE(review): the {@code mapFields} parameter is not used here — the
     * common part is generated with a hard-coded {@code true} and the specialized
     * part uses {@code selectType.isMapFields()} instead. Looks intentional for
     * this subclass hierarchy, but worth confirming.</p>
     *
     * @see com.abubusoft.kripton.processor.sqlite.SelectBuilderUtility.SelectCodeGenerator#generate(com.squareup.javapoet.TypeSpec.Builder,
     *      boolean, com.abubusoft.kripton.processor.sqlite.model.SQLiteModelMethod)
     */
    @Override
    public void generate(TypeSpec.Builder classBuilder, boolean mapFields, SQLiteModelMethod method) {
        // Columns/expressions selected by the method's JQL statement.
        Set<JQLProjection> fieldList = JQLChecker.getInstance()
                .extractProjections(method, method.jql.value, method.getEntity());
        // generate method code
        MethodSpec.Builder methodBuilder = generateMethodBuilder(method);
        generateCommonPart(method, classBuilder, methodBuilder, fieldList, true);
        methodBuilder.addComment("Specialized part - $L - BEGIN", this.getClass().getSimpleName());
        generateSpecializedPart(method, classBuilder, methodBuilder, fieldList, selectType.isMapFields());
        methodBuilder.addComment("Specialized part - $L - END", this.getClass().getSimpleName());
        if (!method.isPagedLiveData()) {
            // method ForLiveData do not have to be generated for paged live
            // data
            classBuilder.addMethod(methodBuilder.build());
        }
    }
}
public class ExceptionSoftening { /** * overrides the visitor to find catch blocks that throw runtime exceptions * @ param seen the opcode of the currently parsed instruction */ @ Override public void sawOpcode ( int seen ) { } }
try {
    stack . precomputation ( this ) ;
    int pc = getPC ( ) ;
    // Does the current PC begin a catch handler we recorded earlier?
    CodeException ex = catchHandlerPCs . get ( Integer . valueOf ( pc ) ) ;
    if ( ex != null ) {
        int endPC ;
        if ( ( seen == Const . GOTO ) || ( seen == Const . GOTO_W ) ) {
            // A goto at the handler start bounds the catch block at its branch target.
            endPC = this . getBranchTarget ( ) ;
        } else {
            // End is unknown for now; treat the block as open-ended.
            endPC = Integer . MAX_VALUE ;
        }
        // Resolve the caught exception type's signature from the constant pool.
        ConstantPool pool = getConstantPool ( ) ;
        ConstantClass ccls = ( ConstantClass ) pool . getConstant ( ex . getCatchType ( ) ) ;
        String catchSig = ccls . getBytes ( pool ) ;
        CatchInfo ci = new CatchInfo ( ex . getHandlerPC ( ) , endPC , catchSig ) ;
        catchInfos . add ( ci ) ;
    }
    // Maintain the set of currently-open catch regions as we scan forward.
    updateEndPCsOnCatchRegScope ( catchInfos , pc , seen ) ;
    removeFinishedCatchBlocks ( catchInfos , pc ) ;
    if ( seen == Const . ATHROW ) {
        processThrow ( ) ;
    } else if ( ( seen == Const . IRETURN ) && isBooleanMethod && ! hasValidFalseReturn && ( stack . getStackDepth ( ) > 0 ) ) {
        // boolean-returning method: inspect the value being returned
        processBooleanReturn ( ) ;
    }
} finally {
    // Keep the opcode stack in sync regardless of exceptions above.
    stack . sawOpcode ( this , seen ) ;
}
public class ChannelFrameworkImpl { /** * @ see com . ibm . wsspi . channelfw . ChannelFramework # updateChain ( String , String [ ] ) */ @ Override public synchronized ChainData updateChain ( String chainName , String [ ] newChannelList ) throws ChannelException , ChainException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) {
    Tr . entry ( tc , "updateChain: " + chainName ) ;
}
// Validate inputs first.
if ( null == chainName ) {
    throw new InvalidChainNameException ( "Null chain name" ) ;
}
if ( ( null == newChannelList ) || ( 0 == newChannelList . length ) ) {
    throw new InvalidChannelNameException ( "Null or empty channel list" ) ;
}
// Verify chain config exists.
ChainDataImpl oldChainData = chainDataMap . get ( chainName ) ;
if ( null == oldChainData ) {
    InvalidChainNameException e = new InvalidChainNameException ( "Unable to update unknown chain, " + chainName ) ;
    FFDCFilter . processException ( e , getClass ( ) . getName ( ) + ".updateChain" , "1724" , this , new Object [ ] { chainName } ) ;
    throw e ;
}
// Verify the chain config is not currently in use by the runtime.
Chain chain = chainRunningMap . get ( chainName ) ;
if ( chain != null ) {
    ChainException e = new ChainException ( "Unable to update runtime chain " + chainName + ". Destroy chain first." ) ;
    FFDCFilter . processException ( e , getClass ( ) . getName ( ) + ".updateChain" , "1733" , this , new Object [ ] { chain } ) ;
    throw e ;
}
// Verify all channel configs were found in the framework.
ChannelData [ ] newChannelData = new ChannelData [ newChannelList . length ] ;
for ( int i = 0 ; i < newChannelList . length ; i ++ ) {
    // Build up list of new channel configs for the new chain.
    newChannelData [ i ] = channelDataMap . get ( newChannelList [ i ] ) ;
    if ( null == newChannelData [ i ] ) {
        InvalidChannelNameException e = new InvalidChannelNameException ( "Unable to update chain config with unknown channel, " + newChannelList [ i ] ) ;
        FFDCFilter . processException ( e , getClass ( ) . getName ( ) + ".updateChain" , "1752" , this , new Object [ ] { newChannelList [ i ] } ) ;
        throw e ;
    }
}
// Ensure that new configuration is different from old one.
ChannelData [ ] oldChannelDataArray = oldChainData . getChannelList ( ) ;
boolean configurationDifferent = true ;
if ( oldChannelDataArray . length == newChannelData . length ) {
    // Same number of channels: check whether every old channel name appears in the new list.
    // NOTE(review): this compares the lists as sets — a pure reordering is treated
    // as "identical" and skipped; confirm that is intended.
    String oldChannelName = null ;
    String newChannelName = null ;
    boolean foundOldChannel = false ;
    configurationDifferent = false ;
    for ( int j = 0 ; j < oldChannelDataArray . length ; j ++ ) {
        oldChannelName = oldChannelDataArray [ j ] . getName ( ) ;
        foundOldChannel = false ;
        for ( int k = 0 ; k < newChannelData . length ; k ++ ) {
            newChannelName = newChannelData [ k ] . getName ( ) ;
            if ( oldChannelName . equals ( newChannelName ) ) {
                foundOldChannel = true ;
                break ;
            }
        }
        if ( ! foundOldChannel ) {
            // Never found the old channel in the new channel list.
            configurationDifferent = true ;
            break ;
        }
    }
}
if ( ! configurationDifferent ) {
    // Nothing changed: return the existing config untouched.
    if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) {
        Tr . debug ( tc , "Identical channel list, no update" ) ;
    }
    if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) {
        Tr . exit ( tc , "updateChain" ) ;
    }
    return oldChainData ;
}
// Create the new chain configuration object with the input chain name.
ChainDataImpl newChainData = null ;
try {
    newChainData = ( ChainDataImpl ) createChainData ( chainName , FlowType . INBOUND , newChannelData , oldChainData . getPropertyBag ( ) ) ;
} catch ( IncoherentChainException e ) {
    FFDCFilter . processException ( e , getClass ( ) . getName ( ) + ".updateChain" , "1792" , this , new Object [ ] { chainName , newChannelData } ) ;
    throw e ;
}
// Ensure existing listeners in old chain config move to new chain config.
newChainData . setChainEventListeners ( oldChainData . removeAllChainEventListeners ( ) ) ;
// Create or swap in new chain config.
this . chainDataMap . put ( chainName , newChainData ) ;
// Update any chain groups including this chain.
ChainGroupDataImpl groupData = null ;
Iterator < ChainGroupData > groupIter = chainGroups . values ( ) . iterator ( ) ;
while ( groupIter . hasNext ( ) ) {
    groupData = ( ChainGroupDataImpl ) groupIter . next ( ) ;
    if ( groupData . containsChain ( chainName ) ) {
        groupData . updateChain ( newChainData ) ;
    }
}
// Alert the chain event listener.
newChainData . chainUpdated ( ) ;
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) {
    Tr . exit ( tc , "updateChain" ) ;
}
return newChainData ;
public class DiSHPreferenceVectorIndex { /** * Initializes the dimension selecting distancefunctions to determine the * preference vectors . * @ param relation the database storing the objects * @ param dimensionality the dimensionality of the objects * @ return the dimension selecting distancefunctions to determine the * preference vectors */ private RangeQuery < V > [ ] initRangeQueries ( Relation < V > relation , int dimensionality ) { } }
@ SuppressWarnings ( "unchecked" ) RangeQuery < V > [ ] rangeQueries = ( RangeQuery < V > [ ] ) new RangeQuery [ dimensionality ] ; for ( int d = 0 ; d < dimensionality ; d ++ ) { rangeQueries [ d ] = relation . getRangeQuery ( new PrimitiveDistanceQuery < > ( relation , new OnedimensionalDistanceFunction ( d ) ) ) ; } return rangeQueries ;
public class CmsLoginManager { /** * Adds an invalid attempt to login for the given user / IP to the storage . < p > * In case the configured threshold is reached , the user is disabled for the configured time . < p > * @ param userName the name of the user * @ param remoteAddress the remore address ( IP ) from which the login attempt was made */ protected void addInvalidLogin ( String userName , String remoteAddress ) { } }
if ( m_maxBadAttempts < 0 ) { // invalid login storage is disabled return ; } String key = createStorageKey ( userName , remoteAddress ) ; // look up the user in the storage CmsUserData userData = m_storage . get ( key ) ; if ( userData != null ) { // user data already contained in storage userData . increaseInvalidLoginCount ( ) ; } else { // create an new data object for this user userData = new CmsUserData ( ) ; m_storage . put ( key , userData ) ; }
public class DescribeProvisionedProductPlanResult { /** * Information about the resource changes that will occur when the plan is executed . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setResourceChanges ( java . util . Collection ) } or { @ link # withResourceChanges ( java . util . Collection ) } if you * want to override the existing values . * @ param resourceChanges * Information about the resource changes that will occur when the plan is executed . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeProvisionedProductPlanResult withResourceChanges ( ResourceChange ... resourceChanges ) { } }
if ( this . resourceChanges == null ) { setResourceChanges ( new java . util . ArrayList < ResourceChange > ( resourceChanges . length ) ) ; } for ( ResourceChange ele : resourceChanges ) { this . resourceChanges . add ( ele ) ; } return this ;
public class ClientExecutorServiceProxy { /** * submit to members callback */ @ Override public void submitToMember ( Runnable command , Member member , ExecutionCallback callback ) { } }
// Wrap the Runnable in a Callable and delegate to the Callable-based overload.
Callable < ? > callable = createRunnableAdapter ( command ) ;
submitToMember ( callable , member , callback ) ;
public class AppLinks { /** * Gets the target URL for an intent . If the intent is from an App Link , this will be the App Link target . * Otherwise , it return null ; For app link intent , this function will broadcast APP _ LINK _ NAVIGATE _ IN _ EVENT _ NAME event . * @ param context the context this function is called within . * @ param intent the incoming intent . * @ return the target URL for the intent if applink intent ; null otherwise . */ public static Uri getTargetUrlFromInboundIntent ( Context context , Intent intent ) { } }
Bundle appLinkData = getAppLinkData ( intent ) ; if ( appLinkData != null ) { String targetString = appLinkData . getString ( KEY_NAME_TARGET ) ; if ( targetString != null ) { MeasurementEvent . sendBroadcastEvent ( context , MeasurementEvent . APP_LINK_NAVIGATE_IN_EVENT_NAME , intent , null ) ; return Uri . parse ( targetString ) ; } } return null ;
public class RevisionDecoder { /** * Decodes the information , after the codec was successfully decoded , and * returns the Diff . * @ param blockSize _ C * length of a C block * @ param blockSize _ S * length of a S block * @ param blockSize _ E * length of a E block * @ param blockSize _ B * length of a B block * @ param blockSize _ L * length of a L block * @ return Diff * @ throws UnsupportedEncodingException * if the character encoding is unsupported * @ throws DecodingException * if the decoding failed */ private Diff decode ( final int blockSize_C , final int blockSize_S , final int blockSize_E , final int blockSize_B , final int blockSize_L ) throws UnsupportedEncodingException , DecodingException { } }
int code = r . read ( blockSize_C ) ; Diff diff = new Diff ( ) ; while ( code != - 1 ) { // System . out . print ( code + " \ t " ) ; switch ( DiffAction . parse ( code ) ) { case FULL_REVISION_UNCOMPRESSED : diff . add ( decodeFullRevision ( blockSize_L ) ) ; break ; case INSERT : diff . add ( decodeAdd ( blockSize_S , blockSize_L ) ) ; break ; case DELETE : diff . add ( decodeDelete ( blockSize_S , blockSize_E ) ) ; break ; case REPLACE : diff . add ( decodeReplace ( blockSize_S , blockSize_E , blockSize_L ) ) ; break ; case CUT : diff . add ( decodeCut ( blockSize_S , blockSize_E , blockSize_B ) ) ; break ; case PASTE : diff . add ( decodePaste ( blockSize_S , blockSize_B , r ) ) ; break ; default : throw new DecodingException ( "Invalid block_c code: " + code ) ; } // System . out . println ( ) ; code = r . read ( blockSize_C ) ; } return diff ;
public class CreateRegexPatternSetRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( CreateRegexPatternSetRequest createRegexPatternSetRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( createRegexPatternSetRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( createRegexPatternSetRequest . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( createRegexPatternSetRequest . getChangeToken ( ) , CHANGETOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class DNS {
    /**
     * Returns the hostname associated with the specified IP address by the
     * provided nameserver.
     *
     * @param hostIp
     *          The address to reverse lookup
     * @param ns
     *          The host name of a reachable DNS server
     * @return The host name associated with the provided IP
     * @throws NamingException
     *           If a NamingException is encountered
     * @deprecated Reliance on DNS is not preferred
     */
    @Deprecated
    public static String reverseDns(InetAddress hostIp, String ns) throws NamingException {
        // Build the reverse lookup name: reversed IPv4 octets + ".in-addr.arpa".
        // NOTE(review): assumes an IPv4 dotted-quad address; an IPv6 address would
        // need ip6.arpa handling — confirm callers only pass IPv4.
        String[] parts = hostIp.getHostAddress().split("\\.");
        String reverseIP = parts[3] + "." + parts[2] + "." + parts[1] + "." + parts[0] + ".in-addr.arpa";
        DirContext ictx = new InitialDirContext();
        try {
            // Use "dns:///..." form when the default nameserver is to be used.
            Attributes attribute = ictx.getAttributes(
                    "dns://" + ((ns == null) ? "" : ns) + "/" + reverseIP,
                    new String[] { "PTR" });
            return attribute.get("PTR").get().toString();
        } finally {
            // Always release the JNDI context — it was previously leaked when the
            // lookup threw before reaching close().
            ictx.close();
        }
    }
}
public class JaversSchemaManager { /** * JaVers 3.9.2 to 3.9.3 schema migration ( MySql only ) */ private void alterMySqlCommitDateColumn ( ) { } }
ColumnType commitDateColType = getTypeOf ( getCommitTableNameWithSchema ( ) , "commit_date" ) ; if ( commitDateColType . typeName . equals ( "TIMESTAMP" ) && commitDateColType . precision == 19 ) { logger . info ( "migrating db schema from JaVers 3.9.2 to 3.9.3 (MySql only) ..." ) ; executeSQL ( "ALTER TABLE " + getCommitTableNameWithSchema ( ) + " MODIFY commit_date TIMESTAMP(3)" ) ; }
public class UnifiedResponseDefaultSettings { /** * HTTP Strict Transport Security ( HSTS ) is an opt - in security enhancement that * is specified by a web application through the use of a special response * header . Once a supported browser receives this header that browser will * prevent any communications from being sent over HTTP to the specified domain * and will instead send all communications over HTTPS . It also prevents HTTPS * click through prompts on browsers . The specification has been released and * published end of 2012 as RFC 6797 ( HTTP Strict Transport Security ( HSTS ) ) by * the IETF . * @ param nMaxAgeSeconds * number of seconds , after the reception of the STS header field , during * which the UA regards the host ( from whom the message was received ) as * a Known HSTS Host . * @ param bIncludeSubdomains * if enabled , this signals the UA that the HSTS Policy applies to this * HSTS Host as well as any sub - domains of the host ' s domain name . */ public static void setStrictTransportSecurity ( @ Nonnegative final int nMaxAgeSeconds , final boolean bIncludeSubdomains ) { } }
setResponseHeader ( CHttpHeader . STRICT_TRANSPORT_SECURITY , new CacheControlBuilder ( ) . setMaxAgeSeconds ( nMaxAgeSeconds ) . getAsHTTPHeaderValue ( ) + ( bIncludeSubdomains ? ";" + CHttpHeader . VALUE_INCLUDE_SUBDOMAINS : "" ) ) ;
public class ThreadPoolMonitor { /** * { @ inheritDoc } */ @ Override public void statisticCreated ( SPIStatistic s ) { } }
if ( s . getId ( ) == ACTIVE_THREADS ) { activeThreads = ( SPIBoundedRangeStatistic ) s ; } else if ( s . getId ( ) == POOL_SIZE ) { poolSize = ( SPIBoundedRangeStatistic ) s ; } else { if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Invlid stats found " + s ) ; } }
public class Variables { /** * Shortcut for calling { @ code Variables . fileValue ( name ) . file ( file ) . mimeType ( type ) . create ( ) } . * The name is set to the file name and the mime type is detected via { @ link MimetypesFileTypeMap } . */ public static FileValue fileValue ( File file ) { } }
String contentType = MimetypesFileTypeMap . getDefaultFileTypeMap ( ) . getContentType ( file ) ; return new FileValueBuilderImpl ( file . getName ( ) ) . file ( file ) . mimeType ( contentType ) . create ( ) ;
public class ComponentExposedTypeGenerator { /** * Transform a Java type name into a JavaScript type name . Takes care of primitive types . * @ param typeMirror A type to convert * @ return A String representing the JavaScript type name */ private String getNativeNameForJavaType ( TypeMirror typeMirror ) { } }
TypeName typeName = TypeName . get ( typeMirror ) ; if ( typeName . equals ( TypeName . INT ) || typeName . equals ( TypeName . BYTE ) || typeName . equals ( TypeName . SHORT ) || typeName . equals ( TypeName . LONG ) || typeName . equals ( TypeName . FLOAT ) || typeName . equals ( TypeName . DOUBLE ) ) { return "Number" ; } else if ( typeName . equals ( TypeName . BOOLEAN ) ) { return "Boolean" ; } else if ( typeName . equals ( TypeName . get ( String . class ) ) || typeName . equals ( TypeName . CHAR ) ) { return "String" ; } else if ( typeMirror . toString ( ) . startsWith ( JsArray . class . getCanonicalName ( ) ) ) { return "Array" ; } else { return "Object" ; }
public class ClusterManager { /** * This method is used when the ClusterManager is restarting after going down * while in Safe Mode . It starts the process of recovering the original * CM state by reading back the state in JSON form . * @ param hostsReader The HostsReader instance * @ throws IOException */ private void recoverClusterManagerFromDisk ( HostsFileReader hostsReader ) throws IOException { } }
LOG . info ( "Restoring state from " + new java . io . File ( conf . getCMStateFile ( ) ) . getAbsolutePath ( ) ) ;
// This will prevent the expireNodes and expireSessions threads from
// expiring the nodes and sessions respectively
safeMode = true ;
LOG . info ( "Safe mode is now: " + ( this . safeMode ? "ON" : "OFF" ) ) ;
CoronaSerializer coronaSerializer = new CoronaSerializer ( conf ) ;
// Expecting the START_OBJECT token for ClusterManager
coronaSerializer . readStartObjectToken ( "ClusterManager" ) ;
coronaSerializer . readField ( "startTime" ) ;
startTime = coronaSerializer . readValueAs ( Long . class ) ;
// Rebuild each manager in exactly the order the fields were serialized.
coronaSerializer . readField ( "nodeManager" ) ;
nodeManager = new NodeManager ( this , hostsReader , coronaSerializer ) ;
nodeManager . setConf ( conf ) ;
coronaSerializer . readField ( "sessionManager" ) ;
sessionManager = new SessionManager ( this , coronaSerializer ) ;
coronaSerializer . readField ( "sessionNotifier" ) ;
sessionNotifier = new SessionNotifier ( sessionManager , this , metrics , coronaSerializer ) ;
// Expecting the END_OBJECT token for ClusterManager
coronaSerializer . readEndObjectToken ( "ClusterManager" ) ;
// Record when this recovery-restart happened.
lastRestartTime = clock . getTime ( ) ;
public class SpannableStringInternal { /** * / * subclasses must do subSequence ( ) to preserve type */ public final void getChars ( int start , int end , char [ ] dest , int off ) { } }
// Copies the characters in [start, end) of the backing text into dest
// starting at offset off, delegating directly to the wrapped String.
mText . getChars ( start , end , dest , off ) ;
public class CmsCoreProvider { /** * Returns the adjusted site root for a resource using the provided site root as a base . < p > * Usually , this would be the site root for the current site . * However , if a resource from the < code > / system / < / code > folder is requested , * this will be the empty String . < p > * @ param siteRoot the site root of the current site * @ param resourcename the resource name to get the adjusted site root for * @ return the adjusted site root for the resource */ public String getAdjustedSiteRoot ( String siteRoot , String resourcename ) { } }
if ( resourcename . startsWith ( VFS_PATH_SYSTEM ) ) { return "" ; } else { return siteRoot ; }
public class DescribeRouteTablesResult { /** * Information about one or more route tables . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setRouteTables ( java . util . Collection ) } or { @ link # withRouteTables ( java . util . Collection ) } if you want to * override the existing values . * @ param routeTables * Information about one or more route tables . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeRouteTablesResult withRouteTables ( RouteTable ... routeTables ) { } }
if ( this . routeTables == null ) { setRouteTables ( new com . amazonaws . internal . SdkInternalList < RouteTable > ( routeTables . length ) ) ; } for ( RouteTable ele : routeTables ) { this . routeTables . add ( ele ) ; } return this ;
public class AbstractMap { /** * Chooses a new prime table capacity optimized for shrinking that ( approximately ) satisfies the invariant * < tt > c * minLoadFactor < = size < = c * maxLoadFactor < / tt > * and has at least one FREE slot for the given size . */ protected int chooseShrinkCapacity ( int size , double minLoad , double maxLoad ) { } }
return nextPrime ( Math . max ( size + 1 , ( int ) ( ( 4 * size / ( minLoad + 3 * maxLoad ) ) ) ) ) ;
public class SloppyMath {
    /**
     * Exponentiation by squaring: computes b^e using the binary expansion of e,
     * multiplying in the repeatedly squared base for every set bit.
     * e must be nonnegative!!! no checking!!!
     *
     * @param b base
     * @param e exponent
     * @return b^e
     */
    public static double intPow(double b, int e) {
        double acc = 1.0;
        // Square the running base once per bit of the exponent, low bit first.
        for (double base = b; e > 0; e >>= 1) {
            if ((e & 1) != 0) {
                acc *= base;
            }
            base *= base;
        }
        return acc;
    }
}
public class SFSUtilities { /** * Return the SRID of the first geometry column of the input table * @ param connection Active connection * @ param table Table name * @ return The SRID of the first geometry column * @ throws SQLException */ public static int getSRID ( Connection connection , TableLocation table ) throws SQLException { } }
ResultSet geomResultSet = getGeometryColumnsView ( connection , table . getCatalog ( ) , table . getSchema ( ) , table . getTable ( ) ) ; int srid = 0 ; while ( geomResultSet . next ( ) ) { srid = geomResultSet . getInt ( "srid" ) ; break ; } geomResultSet . close ( ) ; return srid ;
public class PrettyPrinter { /** * Pretty - print a single object . */ private static Object formatObject ( Object object ) { } }
if ( object instanceof Class ) { return formatArg ( ( Class < ? > ) object ) ; } else if ( object instanceof Key ) { return formatArg ( ( Key < ? > ) object ) ; } else if ( object instanceof List ) { List < ? > list = ( List < ? > ) object ; // Empirically check if this is a List < Dependency > . boolean allDependencies = true ; for ( Object entry : list ) { if ( ! ( entry instanceof Dependency ) ) { allDependencies = false ; break ; } } if ( allDependencies ) { return formatArg ( ( List < Dependency > ) list ) ; } else { return object ; } } else { return object ; }
public class AmazonEC2Client { /** * Describes the specified Dedicated Hosts or all your Dedicated Hosts . * The results describe only the Dedicated Hosts in the Region you ' re currently using . All listed instances consume * capacity on your Dedicated Host . Dedicated Hosts that have recently been released are listed with the state * < code > released < / code > . * @ param describeHostsRequest * @ return Result of the DescribeHosts operation returned by the service . * @ sample AmazonEC2 . DescribeHosts * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / DescribeHosts " target = " _ top " > AWS API * Documentation < / a > */ @ Override public DescribeHostsResult describeHosts ( DescribeHostsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeHosts ( request ) ;
public class ThemeUtil { /** * Applies one or more theme classes to a component . * @ param component Component to receive the theme classes . * @ param themeClasses A list of theme classes to apply . */ public static void applyThemeClass ( BaseUIComponent component , IThemeClass ... themeClasses ) { } }
StringBuilder sb = new StringBuilder ( ) ; for ( IThemeClass themeClass : themeClasses ) { String cls = themeClass == null ? null : themeClass . getThemeClass ( ) ; if ( cls != null ) { sb . append ( sb . length ( ) > 0 ? " " : "" ) . append ( themeClass . getThemeClass ( ) ) ; } } component . addClass ( sb . toString ( ) ) ;
public class MatrixPrinter { /** * Prints the matrix as specified by the supplied < code > Control < / code > instance . The matrix * elements are formatted using the specified * < a href = " http : / / java . sun . com / javase / 6 / docs / api / java / util / Formatter . html # syntax " > format * string < / a > and locale . The format string is expected to receive a single * argument of type double . If < code > locale < / code > is < code > null < / code > then no localization * is applied . * @ param m the matrix to print * @ param control the control instance specifying the destination and format - related * parameters * @ param format the format string * @ param locale the locale to apply ; if it is < code > null < / code > , then no localization is * performed * @ throws NullPointerException if the matrix , control , or format string is < code > null < / code > * @ throws java . util . IllegalFormatException if the format string is incorrect */ public static void print ( Matrix m , Control control , String format , Locale locale ) { } }
// Delegate to the Formatter-based overload; a null locale means "no localization".
print ( m , control , new FormatterNumberFormatter ( format , locale ) ) ;
public class LongStreamEx { /** * Returns a new { @ code LongStreamEx } which is a concatenation of this * stream and the stream containing supplied values * This is a < a href = " package - summary . html # StreamOps " > quasi - intermediate * operation < / a > . * @ param values the values to append to the stream * @ return the new stream */ public LongStreamEx append ( long ... values ) { } }
if ( values . length == 0 ) return this ; return new LongStreamEx ( LongStream . concat ( stream ( ) , LongStream . of ( values ) ) , context ) ;
public class CommonSteps { /** * Checks if html input text contains expected value . * @ param page * The concerned page of elementName * @ param elementName * The key of the PageElement to check * @ param textOrKey * Is the new data ( text or text in context ( after a save ) ) * @ param conditions * list of ' expected ' values condition and ' actual ' values ( { @ link com . github . noraui . gherkin . GherkinStepCondition } ) . * @ throws TechnicalException * is throws if you have a technical error ( format , configuration , datas , . . . ) in NoraUi . * Exception with message and with screenshot and with exception if functional error but no screenshot and no exception if technical error . * @ throws FailureException * if the scenario encounters a functional error */ @ Conditioned @ Et ( "Je vérifie le texte '(.*)-(.*)' avec '(.*)'[\\.|\\?]" ) @ And ( "I check text '(.*)-(.*)' with '(.*)'[\\.|\\?]" ) public void checkInputText ( String page , String elementName , String textOrKey , List < GherkinStepCondition > conditions ) throws FailureException , TechnicalException { } }
// First try the input-value check; if that does not match, fall back to
// checking the element's displayed text.
// NOTE(review): the fallback checkText is expected to raise the failure itself
// on mismatch — confirm against its implementation.
if ( ! checkInputText ( Page . getInstance ( page ) . getPageElementByKey ( '-' + elementName ) , textOrKey ) ) {
    checkText ( Page . getInstance ( page ) . getPageElementByKey ( '-' + elementName ) , textOrKey ) ;
}
public class DestinationManager { /** * < p > This method is used to alter a destination that is localised on this messaging * engine . < / p > * @ param destinationLocalizationDefinition * @ param destinationDefinition * @ return * @ throws SIIncorrectCallException * @ throws SIMPDestinationAlreadyExistsException * @ throws SINotPossibleInCurrentConfigurationException */ public void alterDestinationLocalization ( DestinationDefinition destinationDefinition , LocalizationDefinition destinationLocalizationDefinition , Set < String > destinationLocalizingMEs ) throws SIResourceException , SIIncorrectCallException , SINotPossibleInCurrentConfigurationException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "alterDestinationLocalization" , new Object [ ] { destinationDefinition , destinationLocalizationDefinition , destinationLocalizingMEs } ) ;
// Create a local UOW
LocalTransaction transaction = txManager . createLocalTransaction ( true ) ;
// Try to alter the local destination; any failure path rolls the UOW back.
try {
    alterDestinationLocalization ( destinationDefinition , destinationLocalizationDefinition , destinationLocalizingMEs , transaction ) ;
    // If everything was successful then commit the unit of work
    transaction . commit ( ) ;
} catch ( SIResourceException e ) {
    // No FFDC code needed
    if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "alterDestinationLocalization" , e ) ;
    handleRollback ( transaction ) ;
    throw e ;
} catch ( SITemporaryDestinationNotFoundException e ) {
    // No FFDC code needed
    // Should never occur as you can't alter a temporary destination.
    if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "alterDestinationLocalization" , e ) ;
    handleRollback ( transaction ) ;
    throw new SIErrorException ( e ) ;
} catch ( RuntimeException e ) {
    // FFDC
    FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.DestinationManager.alterDestinationLocalization" , "1:4353:1.508.1.7" , this ) ;
    if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) {
        SibTr . exception ( tc , e ) ;
        SibTr . exit ( tc , "alterDestinationLocalization" , e ) ;
    }
    handleRollback ( transaction ) ;
    throw e ;
}
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "alterDestinationLocalization" ) ;
public class EntityInfo { /** * 获取Entity的QUERY SQL * @ param bean Entity对象 * @ return String */ public String getQueryNamesPrepareSQL ( T bean ) { } }
if ( this . tableStrategy == null ) return queryNamesPrepareSQL ; return queryNamesPrepareSQL . replace ( "${newtable}" , getTable ( bean ) ) ;
public class Config { /** * Read config object stored in JSON format from < code > URL < / code > * @ param url object * @ return config * @ throws IOException error */ public static Config fromJSON ( URL url ) throws IOException { } }
ConfigSupport support = new ConfigSupport ( ) ; return support . fromJSON ( url , Config . class ) ;
public class CliFrontend { private JobID parseJobId ( String jobIdString ) throws CliArgsException { } }
if ( jobIdString == null ) { throw new CliArgsException ( "Missing JobId" ) ; } final JobID jobId ; try { jobId = JobID . fromHexString ( jobIdString ) ; } catch ( IllegalArgumentException e ) { throw new CliArgsException ( e . getMessage ( ) ) ; } return jobId ;
public class PlatformDescription { /** * The frameworks supported by the platform . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setFrameworks ( java . util . Collection ) } or { @ link # withFrameworks ( java . util . Collection ) } if you want to * override the existing values . * @ param frameworks * The frameworks supported by the platform . * @ return Returns a reference to this object so that method calls can be chained together . */ public PlatformDescription withFrameworks ( PlatformFramework ... frameworks ) { } }
if ( this . frameworks == null ) { setFrameworks ( new com . amazonaws . internal . SdkInternalList < PlatformFramework > ( frameworks . length ) ) ; } for ( PlatformFramework ele : frameworks ) { this . frameworks . add ( ele ) ; } return this ;
public class PlNationalIdentificationNumberProvider { /** * This should be tested */ private int calculateSexCode ( Person . Sex sex ) { } }
// Picks a random base value from SEX_FIELDS and adds 1 for males, so the
// resulting digit differs in parity between sexes.
// NOTE(review): this only encodes sex correctly if every SEX_FIELDS entry has
// the same parity — confirm (the javadoc itself says "This should be tested").
return SEX_FIELDS [ baseProducer . randomInt ( SEX_FIELDS . length - 1 ) ] + ( sex == Person . Sex . MALE ? 1 : 0 ) ;
public class XSONByteArray { public void writeString ( String val ) { } }
int x = val . length ( ) ; // char length
// Write a type tag plus a big-endian length prefix, using the smallest tag
// that fits the char count (1, 2, 3 or 4 length bytes).
if ( ( x & 0xFFFFFF00 ) == 0 ) {
    byte [ ] buffer = getBuffer ( 2 ) ;
    buffer [ index ++ ] = XsonConst . STRING1 ;
    buffer [ index ++ ] = ( byte ) x ;
} else if ( ( x & 0xFFFF0000 ) == 0 ) {
    byte [ ] buffer = getBuffer ( 3 ) ;
    buffer [ index ++ ] = XsonConst . STRING2 ;
    buffer [ index ++ ] = ( byte ) ( x >> 8 ) ;
    buffer [ index ++ ] = ( byte ) x ;
} else if ( ( x & 0xFF000000 ) == 0 ) {
    byte [ ] buffer = getBuffer ( 4 ) ;
    buffer [ index ++ ] = XsonConst . STRING3 ;
    buffer [ index ++ ] = ( byte ) ( x >> 16 ) ;
    buffer [ index ++ ] = ( byte ) ( x >> 8 ) ;
    buffer [ index ++ ] = ( byte ) x ;
} else {
    byte [ ] buffer = getBuffer ( 5 ) ;
    buffer [ index ++ ] = XsonConst . STRING ;
    buffer [ index ++ ] = ( byte ) ( x >> 24 ) ;
    buffer [ index ++ ] = ( byte ) ( x >> 16 ) ;
    buffer [ index ++ ] = ( byte ) ( x >> 8 ) ;
    buffer [ index ++ ] = ( byte ) x ;
}
// empty string: nothing more to encode
if ( 0 == x ) {
    return ;
}
int remainingCapacity = 0 ;
int start = 0 ;
int end = 0 ;
int can = 0 ;
// UTF-8-encode the chars chunk by chunk, growing the backing item as needed.
while ( true ) {
    // Check the current item's remaining capacity.
    remainingCapacity = getItemRemainingCapacity ( ) ;
    if ( remainingCapacity < 3 ) {
        // Allocate a new item sized for the worst case (3 bytes per remaining char).
        increaseItem ( ( x - start ) * 3 ) ;
        remainingCapacity = getItemRemainingCapacity ( ) ;
    }
    // Number of chars guaranteed to fit in this item (worst case 3 bytes each).
    can = remainingCapacity / 3 ;
    end = start + can ;
    if ( end > x ) {
        end = x ;
    }
    byte [ ] buffer = item . getArray ( ) ;
    for ( int i = start ; i < end ; i ++ ) {
        char ch = val . charAt ( i ) ;
        if ( ch < 0x80 ) buffer [ index ++ ] = ( byte ) ( ch ) ;
        else if ( ch < 0x800 ) {
            // 2-byte UTF-8 sequence
            buffer [ index ++ ] = ( byte ) ( 0xc0 + ( ( ch >> 6 ) & 0x1f ) ) ;
            buffer [ index ++ ] = ( byte ) ( 0x80 + ( ch & 0x3f ) ) ;
        } else {
            // 3-byte UTF-8 sequence.
            // NOTE(review): surrogate pairs are not combined — each surrogate char is
            // written as its own 3-byte sequence (CESU-8-like); confirm the decoder matches.
            buffer [ index ++ ] = ( byte ) ( 0xe0 + ( ( ch >> 12 ) & 0xf ) ) ;
            buffer [ index ++ ] = ( byte ) ( 0x80 + ( ( ch >> 6 ) & 0x3f ) ) ;
            buffer [ index ++ ] = ( byte ) ( 0x80 + ( ch & 0x3f ) ) ;
        }
    }
    start += can ;
    if ( start >= x ) {
        break ;
    }
}