signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class TimestampDataPublisher { /** * Update destination path to put db and table name in format " dbname . tablename " using { @ link # getDbTableName ( String ) }
* and include timestamp
* Input dst format : { finaldir } / { schemaName }
* Output dst format : { finaldir } / { dbname . tablename } / { currenttimestamp } */
@ Override protected void movePath ( ParallelRunner parallelRunner , State state , Path src , Path dst , int branchId ) throws IOException { } } | String outputDir = dst . getParent ( ) . toString ( ) ; String schemaName = dst . getName ( ) ; Path newDst = new Path ( new Path ( outputDir , getDbTableName ( schemaName ) ) , timestamp ) ; if ( ! this . publisherFileSystemByBranches . get ( branchId ) . exists ( newDst ) ) { WriterUtils . mkdirsWithRecursivePermissionWithRetry ( this . publisherFileSystemByBranches . get ( branchId ) , newDst . getParent ( ) , this . permissions . get ( branchId ) , this . retrierConfig ) ; } super . movePath ( parallelRunner , state , src , newDst , branchId ) ; |
public class RfLocalSessionDataFactory { /** * / * ( non - Javadoc )
* @ see org . jdiameter . common . api . app . IAppSessionDataFactory # getAppSessionData ( java . lang . Class , java . lang . String ) */
@ Override public IRfSessionData getAppSessionData ( Class < ? extends AppSession > clazz , String sessionId ) { } } | if ( clazz . equals ( ClientRfSession . class ) ) { ClientRfSessionDataLocalImpl data = new ClientRfSessionDataLocalImpl ( ) ; data . setSessionId ( sessionId ) ; return data ; } else if ( clazz . equals ( ServerRfSession . class ) ) { ServerRfSessionDataLocalImpl data = new ServerRfSessionDataLocalImpl ( ) ; data . setSessionId ( sessionId ) ; return data ; } throw new IllegalArgumentException ( clazz . toString ( ) ) ; |
public class DatabaseInformationFull {
    /**
     * Retrieves the system table corresponding to the specified index.<p>
     *
     * Dispatches to the generator method for each system table this subclass
     * knows about; anything unrecognized is delegated to the superclass.
     *
     * @param tableIndex index identifying the system table to generate
     * @return the system table corresponding to the specified index
     */
    protected Table generateTable(int tableIndex) {
        switch (tableIndex) {
            case SYSTEM_UDTS : return SYSTEM_UDTS();
            case SYSTEM_VERSIONCOLUMNS : return SYSTEM_VERSIONCOLUMNS();

            // HSQLDB-specific
            case SYSTEM_CACHEINFO : return SYSTEM_CACHEINFO();
            case SYSTEM_SESSIONINFO : return SYSTEM_SESSIONINFO();
            case SYSTEM_PROPERTIES : return SYSTEM_PROPERTIES();
            case SYSTEM_SESSIONS : return SYSTEM_SESSIONS();
            case SYSTEM_TEXTTABLES : return SYSTEM_TEXTTABLES();

            // SQL views
            case ADMINISTRABLE_ROLE_AUTHORIZATIONS : return ADMINISTRABLE_ROLE_AUTHORIZATIONS();
            case APPLICABLE_ROLES : return APPLICABLE_ROLES();
            case ASSERTIONS : return ASSERTIONS();
            case AUTHORIZATIONS : return AUTHORIZATIONS();
            case CHARACTER_SETS : return CHARACTER_SETS();
            case CHECK_CONSTRAINT_ROUTINE_USAGE : return CHECK_CONSTRAINT_ROUTINE_USAGE();
            case CHECK_CONSTRAINTS : return CHECK_CONSTRAINTS();
            case COLLATIONS : return COLLATIONS();
            case COLUMN_COLUMN_USAGE : return COLUMN_COLUMN_USAGE();
            case COLUMN_DOMAIN_USAGE : return COLUMN_DOMAIN_USAGE();
            case COLUMN_UDT_USAGE : return COLUMN_UDT_USAGE();
            case CONSTRAINT_COLUMN_USAGE : return CONSTRAINT_COLUMN_USAGE();
            case CONSTRAINT_TABLE_USAGE : return CONSTRAINT_TABLE_USAGE();
            case COLUMNS : return COLUMNS();
            case DATA_TYPE_PRIVILEGES : return DATA_TYPE_PRIVILEGES();
            case DOMAIN_CONSTRAINTS : return DOMAIN_CONSTRAINTS();
            case DOMAINS : return DOMAINS();
            case ENABLED_ROLES : return ENABLED_ROLES();
            case JAR_JAR_USAGE : return JAR_JAR_USAGE();
            case JARS : return JARS();
            case KEY_COLUMN_USAGE : return KEY_COLUMN_USAGE();
            case METHOD_SPECIFICATIONS : return METHOD_SPECIFICATIONS();
            case MODULE_COLUMN_USAGE : return MODULE_COLUMN_USAGE();
            case MODULE_PRIVILEGES : return MODULE_PRIVILEGES();
            case MODULE_TABLE_USAGE : return MODULE_TABLE_USAGE();
            case MODULES : return MODULES();
            case PARAMETERS : return PARAMETERS();
            case REFERENTIAL_CONSTRAINTS : return REFERENTIAL_CONSTRAINTS();
            case ROLE_AUTHORIZATION_DESCRIPTORS : return ROLE_AUTHORIZATION_DESCRIPTORS();
            case ROLE_COLUMN_GRANTS : return ROLE_COLUMN_GRANTS();
            case ROLE_ROUTINE_GRANTS : return ROLE_ROUTINE_GRANTS();
            case ROLE_TABLE_GRANTS : return ROLE_TABLE_GRANTS();
            case ROLE_USAGE_GRANTS : return ROLE_USAGE_GRANTS();
            case ROLE_UDT_GRANTS : return ROLE_UDT_GRANTS();
            case ROUTINE_COLUMN_USAGE : return ROUTINE_COLUMN_USAGE();
            case ROUTINE_JAR_USAGE : return ROUTINE_JAR_USAGE();
            case ROUTINE_PRIVILEGES : return ROUTINE_PRIVILEGES();
            case ROUTINE_ROUTINE_USAGE : return ROUTINE_ROUTINE_USAGE();
            case ROUTINE_SEQUENCE_USAGE : return ROUTINE_SEQUENCE_USAGE();
            case ROUTINE_TABLE_USAGE : return ROUTINE_TABLE_USAGE();
            case ROUTINES : return ROUTINES();
            case SCHEMATA : return SCHEMATA();
            case SEQUENCES : return SEQUENCES();
            case SQL_FEATURES : return SQL_FEATURES();
            case SQL_IMPLEMENTATION_INFO : return SQL_IMPLEMENTATION_INFO();
            case SQL_PACKAGES : return SQL_PACKAGES();
            case SQL_PARTS : return SQL_PARTS();
            case SQL_SIZING : return SQL_SIZING();
            case SQL_SIZING_PROFILES : return SQL_SIZING_PROFILES();
            case TABLE_CONSTRAINTS : return TABLE_CONSTRAINTS();
            case TABLES : return TABLES();
            case TRANSLATIONS : return TRANSLATIONS();
            case TRIGGERED_UPDATE_COLUMNS : return TRIGGERED_UPDATE_COLUMNS();
            case TRIGGER_COLUMN_USAGE : return TRIGGER_COLUMN_USAGE();
            case TRIGGER_ROUTINE_USAGE : return TRIGGER_ROUTINE_USAGE();
            case TRIGGER_SEQUENCE_USAGE : return TRIGGER_SEQUENCE_USAGE();
            case TRIGGER_TABLE_USAGE : return TRIGGER_TABLE_USAGE();
            case TRIGGERS : return TRIGGERS();
            case USAGE_PRIVILEGES : return USAGE_PRIVILEGES();
            case USER_DEFINED_TYPES : return USER_DEFINED_TYPES();
            case VIEW_COLUMN_USAGE : return VIEW_COLUMN_USAGE();
            case VIEW_ROUTINE_USAGE : return VIEW_ROUTINE_USAGE();
            case VIEW_TABLE_USAGE : return VIEW_TABLE_USAGE();
            case VIEWS : return VIEWS();

            // Unknown here: let the superclass handle it.
            default : return super.generateTable(tableIndex);
        }
    }
}
public class EntryStream { /** * Returns a stream consisting of the elements of this stream which keys are
* instances of given class .
* This is an < a href = " package - summary . html # StreamOps " > intermediate < / a >
* operation .
* @ param < KK > a type of keys to select .
* @ param clazz a class to filter the keys .
* @ return the new stream */
@ SuppressWarnings ( { } } | "unchecked" } ) public < KK > EntryStream < KK , V > selectKeys ( Class < KK > clazz ) { return ( EntryStream < KK , V > ) filter ( e -> clazz . isInstance ( e . getKey ( ) ) ) ; |
public class ContextEntry {
    /**
     * The value (or values, if the condition context key supports multiple values) to provide to the simulation when
     * the key is referenced by a <code>Condition</code> element in an input policy.
     *
     * @return The value (or values, if the condition context key supports multiple values) to provide to the simulation
     *         when the key is referenced by a <code>Condition</code> element in an input policy.
     */
    public java.util.List<String> getContextKeyValues() {
        // Lazily initialize so callers never see a null list.
        if (contextKeyValues == null) {
            contextKeyValues = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return contextKeyValues;
    }
}
public class KamSummarizer {
    /**
     * Returns the number of unique gene references among the given nodes.
     *
     * A node is counted when it is a {@code PROTEIN_ABUNDANCE} function whose
     * label contains exactly one "(" and exactly one ")" — i.e. a plain
     * proteinAbundance term with no nested terms.
     *
     * @param nodes the KAM nodes to scan
     * @return the number of distinct matching labels
     */
    private int getUniqueGeneReference(Collection<KamNode> nodes) {
        // count all proteinAbundance references (distinct by label)
        Set<String> uniqueLabels = new HashSet<String>();
        for (KamNode node : nodes) {
            if (node.getFunctionType() == FunctionEnum.PROTEIN_ABUNDANCE && StringUtils.countMatches(node.getLabel(), "(") == 1 && StringUtils.countMatches(node.getLabel(), ")") == 1) {
                uniqueLabels.add(node.getLabel());
            }
        }
        return uniqueLabels.size();
    }
}
public class MetaMasterSync { /** * Heartbeats to the leader master node . */
@ Override public void heartbeat ( ) { } } | MetaCommand command = null ; try { if ( mMasterId . get ( ) == UNINITIALIZED_MASTER_ID ) { setIdAndRegister ( ) ; } command = mMasterClient . heartbeat ( mMasterId . get ( ) ) ; handleCommand ( command ) ; } catch ( IOException e ) { // An error occurred , log and ignore it or error if heartbeat timeout is reached
if ( command == null ) { LOG . error ( "Failed to receive leader master heartbeat command." , e ) ; } else { LOG . error ( "Failed to execute leader master heartbeat command: {}" , command , e ) ; } mMasterClient . disconnect ( ) ; } |
public class Alternatives {
    /**
     * Returns the first object that is not null.
     *
     * @param <T> the value type
     * @param objects The objects to process, in priority order; may be empty or null
     * @return The first value that is not null. null when there is no not-null value
     *         (including when the argument array itself is null)
     */
    public static <T> T firstNotNull(final T... objects) {
        // Robustness fix: an explicitly-null varargs array previously threw a
        // NullPointerException in the for-each loop; treat it like "no values".
        if (objects == null) {
            return null;
        }
        for (final T object : objects) {
            if (object != null) {
                return object;
            }
        }
        return null;
    }
}
public class FavoritesInner {
    /**
     * Gets a list of favorites defined within an Application Insights component.
     *
     * @param resourceGroupName The name of the resource group.
     * @param resourceName The name of the Application Insights component resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the List&lt;ApplicationInsightsComponentFavoriteInner&gt; object if successful.
     */
    public List<ApplicationInsightsComponentFavoriteInner> list(String resourceGroupName, String resourceName) {
        // Block on the async variant and unwrap the single response body.
        return listWithServiceResponseAsync(resourceGroupName, resourceName).toBlocking().single().body();
    }
}
public class BitemporalCondition {
    /**
     * {@inheritDoc}
     *
     * Includes the four bitemporal bounds (valid-time from/to, transaction-time from/to).
     */
    @Override
    public MoreObjects.ToStringHelper toStringHelper() {
        return toStringHelper(this).add("vtFrom", vtFrom).add("vtTo", vtTo).add("ttFrom", ttFrom).add("ttTo", ttTo);
    }
}
public class CCBAPIClient { /** * Build the URI for a particular service call .
* @ param service The CCB API service to call ( i . e . the srv query parameter ) .
* @ param parameters A map of query parameters to include on the URI .
* @ return The apiBaseUri with the additional query parameters appended . */
private URI makeURI ( final String service , final Map < String , String > parameters ) { } } | try { StringBuilder queryStringBuilder = new StringBuilder ( ) ; if ( apiBaseUri . getQuery ( ) != null ) { queryStringBuilder . append ( apiBaseUri . getQuery ( ) ) . append ( "&" ) ; } queryStringBuilder . append ( "srv=" ) . append ( service ) ; for ( Map . Entry < String , String > entry : parameters . entrySet ( ) ) { queryStringBuilder . append ( "&" ) . append ( entry . getKey ( ) ) . append ( "=" ) . append ( entry . getValue ( ) ) ; } return new URI ( apiBaseUri . getScheme ( ) , apiBaseUri . getAuthority ( ) , apiBaseUri . getPath ( ) , queryStringBuilder . toString ( ) , apiBaseUri . getFragment ( ) ) ; } catch ( URISyntaxException e ) { // This shouldn ' t happen , but needs to be caught regardless .
throw new AssertionError ( "Could not construct API URI" , e ) ; } |
public class LMCLUS { /** * Deviation from a manifold described by beta .
* @ param delta Delta from origin vector
* @ param beta Manifold
* @ return Deviation score */
private double deviation ( double [ ] delta , double [ ] [ ] beta ) { } } | final double a = squareSum ( delta ) ; final double b = squareSum ( transposeTimes ( beta , delta ) ) ; return ( a > b ) ? FastMath . sqrt ( a - b ) : 0. ; |
public class RingBuffer {
    /**
     * Shutdown this ring buffer by preventing any further entries, but allowing all existing entries to be processed by all
     * consumers.
     *
     * The order of the steps below matters: producers are stopped first, then the
     * cursor is completed so consumers drain, then this thread blocks until every
     * consumer runner has finished.
     */
    public void shutdown() {
        // Prevent new entries from being added ...
        this.addEntries.set(false);
        // Mark the cursor as being finished; this will stop all consumers from waiting for a batch ...
        this.cursor.complete();
        // Each of the consumer threads will complete the batch they're working on, but will then terminate ...
        // Stop the garbage collection thread (if running) ...
        if (this.gcConsumer != null) this.gcConsumer.close();
        // Now, block until all the runners have completed ...
        for (ConsumerRunner runner : new HashSet<>(consumers)) { // use a copy of the runners; they're removed when they close
            runner.waitForCompletion();
        }
        assert consumers.isEmpty();
    }
}
public class FileUtils { /** * Deletes the path if it is empty . A path can only be empty if it is a directory which does
* not contain any other directories / files .
* @ param fileSystem to use
* @ param path to be deleted if empty
* @ return true if the path could be deleted ; otherwise false
* @ throws IOException if the delete operation fails */
public static boolean deletePathIfEmpty ( FileSystem fileSystem , Path path ) throws IOException { } } | final FileStatus [ ] fileStatuses ; try { fileStatuses = fileSystem . listStatus ( path ) ; } catch ( FileNotFoundException e ) { // path already deleted
return true ; } catch ( Exception e ) { // could not access directory , cannot delete
return false ; } // if there are no more files or if we couldn ' t list the file status try to delete the path
if ( fileStatuses == null ) { // another indicator of " file not found "
return true ; } else if ( fileStatuses . length == 0 ) { // attempt to delete the path ( will fail and be ignored if the path now contains
// some files ( possibly added concurrently ) )
return fileSystem . delete ( path , false ) ; } else { return false ; } |
public class GosuParser {
    /**
     * Parses a parameter declaration list, delegating to the full overload with
     * the four trailing boolean flags all set to false.
     *
     * @param element the parsed element that owns the parameter list
     * @param bStatic whether the declaration occurs in a static context
     * @param inferredArgumentTypes argument types inferred from context, if any
     * @return the symbols declared by the parameter list
     */
    public ArrayList<ISymbol> parseParameterDeclarationList(IParsedElement element, boolean bStatic, List<IType> inferredArgumentTypes) {
        return parseParameterDeclarationList(element, bStatic, inferredArgumentTypes, false, false, false, false);
    }
}
public class GoogleCloudStorageImpl {
    /**
     * Helper for converting from a Map&lt;String, byte[]&gt; metadata map that may be in a
     * StorageObject into a Map&lt;String, String&gt; suitable for placement inside a
     * GoogleCloudStorageItemInfo.
     */
    @VisibleForTesting
    static Map<String, String> encodeMetadata(Map<String, byte[]> metadata) {
        // Guava transformValues returns a lazy view over the source map; each value
        // is encoded on access via the shared ENCODE_METADATA_VALUES function.
        return Maps.transformValues(metadata, ENCODE_METADATA_VALUES);
    }
}
public class JsonEscape {
    /**
     * Perform a JSON <strong>unescape</strong> operation on a <tt>String</tt> input, writing
     * results to a <tt>Writer</tt>.
     *
     * No additional configuration arguments are required. Unescape operations
     * will always perform <em>complete</em> JSON unescape of SECs and u-based escapes.
     *
     * This method is <strong>thread-safe</strong>.
     *
     * @param text the <tt>String</tt> to be unescaped.
     * @param writer the <tt>java.io.Writer</tt> to which the unescaped result will be written. Nothing will
     *               be written at all to this writer if input is <tt>null</tt>.
     * @throws IOException if an input/output exception occurs
     * @since 1.1.2
     */
    public static void unescapeJson(final String text, final Writer writer) throws IOException {
        if (writer == null) {
            throw new IllegalArgumentException("Argument 'writer' cannot be null");
        }
        if (text == null) {
            return;
        }
        if (text.indexOf('\\') < 0) {
            // Fail fast: no backslash means nothing to unescape, so avoid the more
            // complex (and less JIT-able) unescape machinery entirely.
            writer.write(text);
            return;
        }
        JsonEscapeUtil.unescape(new InternalStringReader(text), writer);
    }
}
public class MapLayer { /** * Replies if the specified point ( < var > x < / var > , < var > y < / var > )
* was inside the figure of this MapElement .
* < p > If this MapElement has no associated figure , this method
* always returns < code > false < / code > .
* @ param point is a geo - referenced coordinate
* @ param delta is the geo - referenced distance that corresponds to a approximation
* distance in the screen coordinate system
* @ return < code > true < / code > if this MapElement had an associated figure and
* the specified point was inside this bounds of this figure , otherwhise
* < code > false < / code > */
@ Pure public boolean contains ( Point2D < ? , ? > point , double delta ) { } } | final Rectangle2d bounds = getBoundingBox ( ) ; if ( bounds == null ) { return false ; } double dlt = delta ; if ( dlt < 0 ) { dlt = - dlt ; } if ( dlt == 0 ) { return bounds . contains ( point ) ; } return Circle2afp . intersectsCircleRectangle ( point . getX ( ) , point . getY ( ) , dlt , bounds . getMinX ( ) , bounds . getMinY ( ) , bounds . getMaxX ( ) , bounds . getMaxY ( ) ) ; |
public class Boolean2IntFieldConversion { /** * @ see FieldConversion # sqlToJava ( Object ) */
public Object sqlToJava ( Object source ) { } } | if ( source instanceof Integer ) { if ( source . equals ( I_TRUE ) ) { return Boolean . TRUE ; } else { return Boolean . FALSE ; } } else { return source ; } |
public class MainLogRepositoryBrowserImpl { /** * ( non - Javadoc )
* @ see com . ibm . ws . logging . hpel . LogInstanceBrowser # findNext ( com . ibm . ws . logging . hpel . impl . RepositoryPointerImpl , long ) */
@ Override public LogRepositoryBrowser findNext ( RepositoryPointerImpl location , long timelimit ) { } } | String [ ] instanceIds = location . getInstanceIds ( ) ; if ( instanceIds . length == 0 ) { logger . logp ( Level . SEVERE , className , "findNext" , "HPEL_NotRepositoryLocation" ) ; return null ; } return findNext ( parseTimeStamp ( instanceIds [ 0 ] ) , timelimit ) ; |
public class JKTypeMapping { /** * Gets the type .
* @ param typeNumber the type number
* @ return the type */
public static JKType getType ( int typeNumber ) { } } | JKType sqlDataType = codeToJKTypeMapping . get ( typeNumber ) ; if ( sqlDataType == null ) { logger . debug ( "No mapping found for datatype , default mapping will return " + typeNumber ) ; return DEFAULT_MAPPING ; } return sqlDataType ; |
public class AuthorizationRequestUrl { /** * Sets the < a href = " http : / / tools . ietf . org / html / rfc6749 # section - 3.1.1 " > response type < / a > , which
* must be { @ code " code " } for requesting an authorization code , { @ code " token " } for requesting an
* access token ( implicit grant ) , or a list of registered extension values to join with a space .
* Overriding is only supported for the purpose of calling the super implementation and changing
* the return type , but nothing else .
* @ since 1.15 */
public AuthorizationRequestUrl setResponseTypes ( Collection < String > responseTypes ) { } } | this . responseTypes = Joiner . on ( ' ' ) . join ( responseTypes ) ; return this ; |
public class syslog_snmp { /** * < pre >
* Report for snmp syslog message received by this collector . .
* < / pre > */
public static syslog_snmp [ ] get ( nitro_service client ) throws Exception { } } | syslog_snmp resource = new syslog_snmp ( ) ; resource . validate ( "get" ) ; return ( syslog_snmp [ ] ) resource . get_resources ( client ) ; |
public class Mp4SettingsMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param mp4Settings the settings to marshall; must not be null
     * @param protocolMarshaller the target protocol marshaller
     */
    public void marshall(Mp4Settings mp4Settings, ProtocolMarshaller protocolMarshaller) {
        if (mp4Settings == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Marshall each field against its protocol binding.
            protocolMarshaller.marshall(mp4Settings.getCslgAtom(), CSLGATOM_BINDING);
            protocolMarshaller.marshall(mp4Settings.getFreeSpaceBox(), FREESPACEBOX_BINDING);
            protocolMarshaller.marshall(mp4Settings.getMoovPlacement(), MOOVPLACEMENT_BINDING);
            protocolMarshaller.marshall(mp4Settings.getMp4MajorBrand(), MP4MAJORBRAND_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class StringUtils { /** * Case insensitive removal of a substring if it is at the begining of a
* source string , otherwise returns the source string .
* A < code > null < / code > source string will return < code > null < / code > . An empty
* ( " " ) source string will return the empty string . A < code > null < / code >
* search string will return the source string .
* < pre >
* StringUtils . removeStartIgnoreCase ( null , * ) = null
* StringUtils . removeStartIgnoreCase ( " " , * ) = " "
* StringUtils . removeStartIgnoreCase ( * , null ) = *
* StringUtils . removeStartIgnoreCase ( " www . domain . com " , " www . " ) = " domain . com "
* StringUtils . removeStartIgnoreCase ( " www . domain . com " , " WWW . " ) = " domain . com "
* StringUtils . removeStartIgnoreCase ( " domain . com " , " www . " ) = " domain . com "
* StringUtils . removeStartIgnoreCase ( " www . domain . com " , " domain " ) = " www . domain . com "
* StringUtils . removeStartIgnoreCase ( " abc " , " " ) = " abc "
* < / pre >
* @ param str
* the source String to search , may be null
* @ param remove
* the String to search for ( case insensitive ) and remove , may be
* null
* @ return the substring with the string removed if found , < code > null < / code >
* if null String input
* @ since 2.4 */
public static String removeStartIgnoreCase ( String str , String remove ) { } } | if ( isEmpty ( str ) || isEmpty ( remove ) ) { return str ; } if ( startsWithIgnoreCase ( str , remove ) ) { return str . substring ( remove . length ( ) ) ; } return str ; |
public class HeaderValidator { /** * { @ inheritDoc } */
@ Override public void validate ( ValidationHelper helper , Context context , String key , Header t ) { } } | if ( t != null ) { String reference = t . getRef ( ) ; if ( reference != null && ! reference . isEmpty ( ) ) { ValidatorUtils . referenceValidatorHelper ( reference , t , helper , context , key ) ; return ; } // The examples object is mutually exclusive of the example object .
if ( ( t . getExample ( ) != null ) && ( t . getExamples ( ) != null && ! t . getExamples ( ) . isEmpty ( ) ) ) { final String message = Tr . formatMessage ( tc , "headerExampleOrExamples" , key ) ; helper . addValidationEvent ( new ValidationEvent ( ValidationEvent . Severity . WARNING , context . getLocation ( ) , message ) ) ; } Schema schema = t . getSchema ( ) ; Content content = t . getContent ( ) ; // A parameter MUST contain either a schema property , or a content property , but not both .
if ( schema == null && content == null ) { final String message = Tr . formatMessage ( tc , "headerSchemaOrContent" , key ) ; helper . addValidationEvent ( new ValidationEvent ( ValidationEvent . Severity . ERROR , context . getLocation ( ) , message ) ) ; } if ( schema != null && content != null ) { final String message = Tr . formatMessage ( tc , "headerSchemaAndContent" , key ) ; helper . addValidationEvent ( new ValidationEvent ( ValidationEvent . Severity . ERROR , context . getLocation ( ) , message ) ) ; } // The ' content ' map MUST only contain one entry .
if ( content != null && content . size ( ) > 1 ) { final String message = Tr . formatMessage ( tc , "headerContentMap" , key ) ; helper . addValidationEvent ( new ValidationEvent ( ValidationEvent . Severity . ERROR , context . getLocation ( ) , message ) ) ; } } |
public class LocalityPreservingProjection { /** * Executes the LPP script , thereby computing the locality preserving
* projection of the data matrix to the specified number of dimension , using
* the affinity matrix to determine locality . The result is written to the
* output file .
* @ param dataMatrixFile a file containing the original data points to be
* projected
* @ param affMatrixFile the file containing the affinity matrix that
* connects data points in the { @ code dataMatrixFile }
* @ param dims the number of dimensions to which the matrix should be
* reduced
* @ param outputMatrix the file to which the output matrix should be written
* in DENSE _ TEXT format */
private static void execute ( File dataMatrixFile , File affMatrixFile , int dims , File outputMatrix ) throws IOException { } } | // Decide whether to use Matlab or Octave
if ( isMatlabAvailable ( ) ) invokeMatlab ( dataMatrixFile , affMatrixFile , dims , outputMatrix ) ; // Ensure that if Matlab isn ' t present that we can at least use Octave
else if ( isOctaveAvailable ( ) ) invokeOctave ( dataMatrixFile , affMatrixFile , dims , outputMatrix ) ; else throw new IllegalStateException ( "Cannot find Matlab or Octave to invoke LPP" ) ; |
public class PlaylistSubscriberStream { /** * { @ inheritDoc } */
public void receiveAudio ( boolean receive ) { } } | if ( engine != null ) { // check if engine currently receives audio , returns previous value
boolean receiveAudio = engine . receiveAudio ( receive ) ; if ( receiveAudio && ! receive ) { // send a blank audio packet to reset the player
engine . sendBlankAudio ( true ) ; } else if ( ! receiveAudio && receive ) { // do a seek
seekToCurrentPlayback ( ) ; } } else { log . debug ( "PlayEngine was null, receiveAudio cannot be modified" ) ; } |
public class JsGeometryIndexStateService { /** * Get the current selection . Do not make changes on this list !
* @ return The current selection ( vertices / edges / sub - geometries ) . */
GeometryIndex [ ] getSelection ( ) { } } | List < GeometryIndex > indices = delegate . getSelection ( ) ; return indices . toArray ( new GeometryIndex [ indices . size ( ) ] ) ; |
public class Dispatcher { /** * Sets the current - thread dispatched page . */
public static void setDispatchedPage ( ServletRequest request , String dispatchedPage ) { } } | if ( logger . isLoggable ( Level . FINE ) ) logger . log ( Level . FINE , "request={0}, dispatchedPage={1}" , new Object [ ] { request , dispatchedPage } ) ; request . setAttribute ( DISPATCHED_PAGE_REQUEST_ATTRIBUTE , dispatchedPage ) ; |
public class Http2ClientStreamTransportState {
    /**
     * Called by subclasses whenever a data frame is received from the transport.
     *
     * @param frame the received data frame
     * @param endOfStream {@code true} if there will be no more data received for this stream
     */
    protected void transportDataReceived(ReadableBuffer frame, boolean endOfStream) {
        if (transportError != null) {
            // We've already detected a transport error and now we're just accumulating more detail
            // for it.
            transportError = transportError.augmentDescription("DATA-----------------------------\n" + ReadableBuffers.readAsString(frame, errorCharset));
            frame.close();
            // Cap how much detail we accumulate; fail once the description grows past 1000
            // characters or the stream ends.
            if (transportError.getDescription().length() > 1000 || endOfStream) {
                http2ProcessingFailed(transportError, false, transportErrorMetadata);
            }
        } else {
            if (!headersReceived) {
                // Data before headers violates the HTTP/2 request/response sequence.
                http2ProcessingFailed(Status.INTERNAL.withDescription("headers not received before payload"), false, new Metadata());
                return;
            }
            inboundDataReceived(frame);
            if (endOfStream) {
                // This is a protocol violation as we expect to receive trailers.
                transportError = Status.INTERNAL.withDescription("Received unexpected EOS on DATA frame from server.");
                transportErrorMetadata = new Metadata();
                transportReportStatus(transportError, false, transportErrorMetadata);
            }
        }
    }
}
public class HealthStatus { /** * Create an instance with the given message , status code , and exception . */
public static HealthStatus create ( int status , String message , Throwable exception ) { } } | return new AutoValue_HealthStatus ( message , status , Optional . ofNullable ( exception ) ) ; |
public class ClientSharedObject { /** * Connect the shared object using the passed connection .
* @ param conn
* Attach SO to given connection */
public void connect ( IConnection conn ) { } } | if ( conn instanceof RTMPConnection ) { if ( ! isConnected ( ) ) { source = conn ; SharedObjectMessage msg = new SharedObjectMessage ( name , 0 , isPersistent ( ) ) ; msg . addEvent ( new SharedObjectEvent ( Type . SERVER_CONNECT , null , null ) ) ; Channel c = ( ( RTMPConnection ) conn ) . getChannel ( 3 ) ; c . write ( msg ) ; } else { throw new UnsupportedOperationException ( "Already connected" ) ; } } else { throw new UnsupportedOperationException ( "Only RTMP connections are supported" ) ; } |
public class Generic2AggPooledTopNScannerPrototype {
    /**
     * Any changes to this method should be coordinated with {@link TopNUtils}, {@link
     * PooledTopNAlgorithm#computeSpecializedScanAndAggregateImplementations} and downstream methods.
     *
     * It should be checked with a tool like https://github.com/AdoptOpenJDK/jitwatch that C2 compiler output for this
     * method doesn't have any method calls in the while loop, i.e. all method calls are inlined. To be able to see
     * assembly of this method in JITWatch and other similar tools, {@link
     * PooledTopNAlgorithm#specializeGeneric2AggPooledTopN} should be turned off. Note that in this case the benchmark
     * should be "naturally monomorphic", i.e. execute this method always with the same runtime shape.
     *
     * If the while loop contains not inlined method calls, it should be considered as a performance bug.
     */
    @Override
    public long scanAndAggregate(DimensionSelector dimensionSelector, BufferAggregator aggregator1, int aggregator1Size, BufferAggregator aggregator2, int aggregator2Size, Cursor cursor, int[] positions, ByteBuffer resultsBuffer) {
        int totalAggregatorsSize = aggregator1Size + aggregator2Size;
        long processedRows = 0;
        int positionToAllocate = 0;
        while (!cursor.isDoneOrInterrupted()) {
            final IndexedInts dimValues = dimensionSelector.getRow();
            final int dimSize = dimValues.size();
            for (int i = 0; i < dimSize; i++) {
                int dimIndex = dimValues.get(i);
                int position = positions[dimIndex];
                if (position >= 0) {
                    // Buffer slot already allocated for this dimension value: just aggregate.
                    aggregator1.aggregate(resultsBuffer, position);
                    aggregator2.aggregate(resultsBuffer, position + aggregator1Size);
                } else if (position == TopNAlgorithm.INIT_POSITION_VALUE) {
                    // First time we see this dimension value: allocate a slot, then init + aggregate
                    // both aggregators in it.
                    positions[dimIndex] = positionToAllocate;
                    position = positionToAllocate;
                    aggregator1.init(resultsBuffer, position);
                    aggregator1.aggregate(resultsBuffer, position);
                    position += aggregator1Size;
                    aggregator2.init(resultsBuffer, position);
                    aggregator2.aggregate(resultsBuffer, position);
                    positionToAllocate += totalAggregatorsSize;
                }
            }
            processedRows++;
            cursor.advanceUninterruptibly();
        }
        return processedRows;
    }
}
public class MarkovGenerator { /** * Returns a sentence guaranteed to not be any loner than the specified
* number of characters . Will generate a complete sentence - if the
* generator is not able to generate a complete sentence under the limit
* after a number of attempts it will throw an { @ link IllegalArgumentException }
* and give up .
* @ param maxChars the maximum number of characters to return
* @ return a sentence no longer than the specified number of characters
* @ throws IllegalArgumentException if unable to generate a sentence that short after a number of attempts */
public String nextSentence ( int maxChars ) { } } | for ( int i = 0 ; i < 1000 ; i ++ ) { String sentence = nextSentence ( ) ; if ( sentence . length ( ) <= maxChars ) { return sentence ; } } throw new IllegalArgumentException ( "Unable to generate sentence smaller than " + maxChars + "characters. Try setting it higher." ) ; |
public class PortablePositionNavigator { /** * token with [ any ] quantifier */
private static PortablePosition navigateToPathTokenWithAnyQuantifier ( PortableNavigatorContext ctx , PortablePathCursor path , NavigationFrame frame ) throws IOException { } } | // check if the underlying field is of array type
validateArrayType ( ctx . getCurrentClassDefinition ( ) , ctx . getCurrentFieldDefinition ( ) , path . path ( ) ) ; if ( ctx . isCurrentFieldOfType ( FieldType . PORTABLE_ARRAY ) ) { // the result will be returned if it was the last token of the path , otherwise it has just moved further .
PortablePosition result = navigateToPathTokenWithAnyQuantifierInPortableArray ( ctx , path , frame ) ; if ( result != null ) { return result ; } } else { // there will always be a result since it ' s impossible to navigate further from a primitive field .
return navigateToPathTokenWithAnyQuantifierInPrimitiveArray ( ctx , path , frame ) ; } return null ; |
public class DoubleSummary { /** * Return a new value object of the statistical summary , currently
* represented by the { @ code statistics } object .
* @ param statistics the creating ( mutable ) statistics class
* @ return the statistical moments */
public static DoubleSummary of ( final DoubleSummaryStatistics statistics ) { } } | return new DoubleSummary ( statistics . getCount ( ) , statistics . getMin ( ) , statistics . getMax ( ) , statistics . getSum ( ) , statistics . getAverage ( ) ) ; |
public class RegistryService { /** * Defines the configuration of the service thanks to the provided plugin if and
* only if the plugin is of type { @ link RegistryInitializationEntryPlugin }
* @ param plugin the plugin from which we extract the configuration that is expected
* to be of type { @ link RegistryInitializationEntryPlugin } */
public void addPlugin ( ComponentPlugin plugin ) { } } | if ( RegistryInitializationEntryPlugin . class . isAssignableFrom ( plugin . getClass ( ) ) ) { RegistryInitializationEntryPlugin registryPlugin = ( RegistryInitializationEntryPlugin ) plugin ; appConfigurations = registryPlugin . getAppConfiguration ( ) ; entryLocation = registryPlugin . getLocation ( ) ; if ( entryLocation == null ) entryLocation = EXO_APPLICATIONS ; } |
public class RotationAxis { /** * Find a segment of the axis that covers the specified set of atoms .
* Projects the input atoms onto the rotation axis and returns the bounding
* points .
* In the case of a pure translational axis , the axis location is undefined
* so the center of mass will be used instead .
* @ param atoms
* @ return two points defining the axis segment */
public Pair < Atom > getAxisEnds ( Atom [ ] atoms ) { } } | // Project each Atom onto the rotation axis to determine limits
double min , max ; min = max = Calc . scalarProduct ( rotationAxis , atoms [ 0 ] ) ; for ( int i = 1 ; i < atoms . length ; i ++ ) { double prod = Calc . scalarProduct ( rotationAxis , atoms [ i ] ) ; if ( prod < min ) min = prod ; if ( prod > max ) max = prod ; } double uLen = Calc . scalarProduct ( rotationAxis , rotationAxis ) ; // Should be 1 , but double check
min /= uLen ; max /= uLen ; // Project the origin onto the axis . If the axis is undefined , use the center of mass
Atom axialPt ; if ( rotationPos == null ) { Atom center = Calc . centerOfMass ( atoms ) ; // Project center onto the axis
Atom centerOnAxis = Calc . scale ( rotationAxis , Calc . scalarProduct ( center , rotationAxis ) ) ; // Remainder is projection of origin onto axis
axialPt = Calc . subtract ( center , centerOnAxis ) ; } else { axialPt = rotationPos ; } // Find end points of the rotation axis to display
Atom axisMin = ( Atom ) axialPt . clone ( ) ; Calc . scaleAdd ( min , rotationAxis , axisMin ) ; Atom axisMax = ( Atom ) axialPt . clone ( ) ; Calc . scaleAdd ( max , rotationAxis , axisMax ) ; return new Pair < > ( axisMin , axisMax ) ; |
public class VoltDecimalHelper { /** * Converts BigInteger ' s byte representation containing a scaled magnitude to a fixed size 16 byte array
* and set the sign in the most significant byte ' s most significant bit .
* @ param scaledValue Scaled twos complement representation of the decimal
* @ param isNegative Determines whether the sign bit is set
* @ return */
private static final byte [ ] expandToLength16 ( byte scaledValue [ ] , final boolean isNegative ) { } } | if ( scaledValue . length == 16 ) { return scaledValue ; } byte replacement [ ] = new byte [ 16 ] ; if ( isNegative ) { Arrays . fill ( replacement , ( byte ) - 1 ) ; } int shift = ( 16 - scaledValue . length ) ; for ( int ii = 0 ; ii < scaledValue . length ; ++ ii ) { replacement [ ii + shift ] = scaledValue [ ii ] ; } return replacement ; |
public class CollectHelper { /** * Checks in a doCall back . It also wraps up the group if all the callbacks have checked in . */
void add ( final byte type , final Object value ) { } } | final int w = write . getAndIncrement ( ) ; if ( w < size ) { writeAt ( w , type , value ) ; } // countdown could wrap around , however we check the state of finished in here .
// MUST be called after write to make sure that results and states are synchronized .
final int c = countdown . decrementAndGet ( ) ; if ( c < 0 ) { throw new IllegalStateException ( "already finished (countdown)" ) ; } // if this thread is not the last thread to check - in , do nothing . .
if ( c != 0 ) { return ; } // make sure this can only happen once .
// This protects against countdown , and write wrapping around which should very rarely
// happen .
if ( ! done . compareAndSet ( false , true ) ) { throw new IllegalStateException ( "already finished" ) ; } done ( collect ( ) ) ; |
public class CborDecoder { /** * Streaming decoding of an input stream . On each decoded DataItem , the
* callback listener is invoked .
* @ param dataItemListener
* the callback listener
* @ throws CborException
* if decoding failed */
public void decode ( DataItemListener dataItemListener ) throws CborException { } } | Objects . requireNonNull ( dataItemListener ) ; DataItem dataItem = decodeNext ( ) ; while ( dataItem != null ) { dataItemListener . onDataItem ( dataItem ) ; dataItem = decodeNext ( ) ; } |
public class JwtAuthenticationService { /** * Returns the token ( as a String ) , if it exists , otherwise returns null .
* @ param headers the HttpHeader to inspect to find the Authorization - Token
* cookie or Authorization Bearer header
* @ return the token if found , otherwise null
* @ since 1.0.0 */
private String getAuthorizationToken ( final HttpHeaders headers ) { } } | if ( headers . getCookies ( ) != null ) { for ( Map . Entry < String , Cookie > entry : headers . getCookies ( ) . entrySet ( ) ) { if ( AuthorizationTokenCookie . COOKIE_NAME . equals ( entry . getValue ( ) . getName ( ) ) ) { return entry . getValue ( ) . getValue ( ) ; } } } final List < String > header = headers . getRequestHeader ( "Authorization" ) ; if ( header != null ) { final String bearer = header . get ( 0 ) ; if ( bearer != null ) { return bearer . substring ( "Bearer " . length ( ) ) ; } } return null ; |
public class DeleteListener { /** * When a batch fails or a callback throws an Exception , run this listener
* code . Multiple listeners can be registered with this method .
* @ param listener the code to run when a failure occurs
* @ return this instance for method chaining
* @ deprecated use { @ link # onFailure ( BatchFailureListener ) } */
@ Deprecated public DeleteListener onBatchFailure ( BatchFailureListener < Batch < String > > listener ) { } } | failureListeners . add ( listener ) ; return this ; |
public class XpathUtils { /** * Same as { @ link # asDouble ( String , Node ) } but allows an xpath to be passed
* in explicitly for reuse . */
public static Double asDouble ( String expression , Node node , XPath xpath ) throws XPathExpressionException { } } | String doubleString = evaluateAsString ( expression , node , xpath ) ; return ( isEmptyString ( doubleString ) ) ? null : Double . parseDouble ( doubleString ) ; |
public class GeographyPointValue { /** * Create a GeographyPointValue from a well - known text string .
* @ param param A well - known text string .
* @ return A new instance of GeographyPointValue . */
public static GeographyPointValue fromWKT ( String param ) { } } | if ( param == null ) { throw new IllegalArgumentException ( "Null well known text argument to GeographyPointValue constructor." ) ; } Matcher m = wktPattern . matcher ( param ) ; if ( m . find ( ) ) { // Add 0.0 to avoid - 0.0.
double longitude = toDouble ( m . group ( 1 ) , m . group ( 2 ) ) + 0.0 ; double latitude = toDouble ( m . group ( 3 ) , m . group ( 4 ) ) + 0.0 ; if ( Math . abs ( latitude ) > 90.0 ) { throw new IllegalArgumentException ( String . format ( "Latitude \"%f\" out of bounds." , latitude ) ) ; } if ( Math . abs ( longitude ) > 180.0 ) { throw new IllegalArgumentException ( String . format ( "Longitude \"%f\" out of bounds." , longitude ) ) ; } return new GeographyPointValue ( longitude , latitude ) ; } else { throw new IllegalArgumentException ( "Cannot construct GeographyPointValue value from \"" + param + "\"" ) ; } |
public class TargetStreamSetControl { /** * / * ( non - Javadoc )
* @ see com . ibm . ws . sib . processor . runtime . SIMPControllable # getId ( ) */
public String getId ( ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getId" ) ; String returnString = streamID . toString ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getId" , returnString ) ; return returnString ; |
public class SmtpRequests { /** * Creates a { @ code VRFY } request . */
public static SmtpRequest vrfy ( CharSequence user ) { } } | return new DefaultSmtpRequest ( SmtpCommand . VRFY , ObjectUtil . checkNotNull ( user , "user" ) ) ; |
public class ThreadPoolManager { /** * 监控api */
public static int queueSize ( ) { } } | int queueSize = 0 ; for ( ExecutorService pool : EXECUTORS ) { if ( pool instanceof ThreadPoolExecutor ) { queueSize += ( ( ThreadPoolExecutor ) pool ) . getQueue ( ) . size ( ) ; } } for ( ExecutorService pool : EXPLICIT_EXECUTORS ) { if ( pool instanceof ThreadPoolExecutor ) { queueSize += ( ( ThreadPoolExecutor ) pool ) . getQueue ( ) . size ( ) ; } } return queueSize ; |
public class SegmentManager { /** * Returns a map of dataSource to the total byte size of segments managed by this segmentManager . This method should
* be used carefully because the returned map might be different from the actual data source states .
* @ return a map of dataSources and their total byte sizes */
public Map < String , Long > getDataSourceSizes ( ) { } } | return dataSources . entrySet ( ) . stream ( ) . collect ( Collectors . toMap ( Entry :: getKey , entry -> entry . getValue ( ) . getTotalSegmentSize ( ) ) ) ; |
public class ESHttpUtils { /** * Returns an string from a node ' s content .
* @ param rootNode
* Node to search .
* @ param xPath
* XPath to use .
* @ param expression
* XPath expression .
* @ return Node or < code > null < / code > if no match was found . */
@ Nullable public static String findContentText ( final Node rootNode , final XPath xPath , final String expression ) { } } | final Node node = findNode ( rootNode , xPath , expression ) ; if ( node == null ) { return null ; } return node . getTextContent ( ) ; |
public class Settings { /** * Returns boolean flag
* @ param name of flag
* @ return true or false
* @ throws IllegalArgumentException in case setting is not a boolean setting */
public boolean getBool ( String name ) { } } | Object value = super . get ( name ) ; if ( value instanceof Boolean ) { return ( Boolean ) value ; } if ( value instanceof String ) { String txt = ( String ) value ; txt = txt . trim ( ) . toLowerCase ( ) ; if ( "yes" . equals ( txt ) || "y" . equals ( txt ) || "1" . equals ( txt ) || "true" . equals ( txt ) || "t" . equals ( txt ) ) { return true ; } if ( "no" . equals ( txt ) || "n" . equals ( txt ) || "0" . equals ( txt ) || "false" . equals ( txt ) || "f" . equals ( txt ) ) { return false ; } } if ( value == null ) { throw new IllegalArgumentException ( "Setting: '" + name + "', not found!" ) ; } throw new IllegalArgumentException ( "Setting: '" + name + "', can't be converted to boolean: '" + value + "'!" ) ; |
public class TableSliceGroup { /** * Applies the given aggregation to the given column .
* The apply and combine steps of a split - apply - combine . */
public Table aggregate ( String colName1 , AggregateFunction < ? , ? > ... functions ) { } } | ArrayListMultimap < String , AggregateFunction < ? , ? > > columnFunctionMap = ArrayListMultimap . create ( ) ; columnFunctionMap . putAll ( colName1 , Lists . newArrayList ( functions ) ) ; return aggregate ( columnFunctionMap ) ; |
public class StorageObjectSummary { /** * Contructs a StorageObjectSummary object from the S3 equivalent S3ObjectSummary
* @ param objSummary the AWS S3 ObjectSummary object to copy from
* @ return the ObjectSummary object created */
public static StorageObjectSummary createFromS3ObjectSummary ( S3ObjectSummary objSummary ) { } } | return new StorageObjectSummary ( objSummary . getBucketName ( ) , objSummary . getKey ( ) , // S3 ETag is not always MD5 , but since this code path is only
// used in skip duplicate files in PUT command , It ' s not
// critical to guarantee that it ' s MD5
objSummary . getETag ( ) , objSummary . getSize ( ) ) ; |
public class Node { /** * syck _ seq _ assign */
public void seqAssign ( int idx , Object id ) { } } | ( ( Data . Seq ) data ) . items [ idx ] = id ; |
public class CmsSystemConfiguration { /** * Adds a new instance of a resource init handler class . < p >
* @ param clazz the class name of the resource init handler to instantiate and add */
public void addResourceInitHandler ( String clazz ) { } } | Object initClass ; try { initClass = Class . forName ( clazz ) . newInstance ( ) ; } catch ( Throwable t ) { LOG . error ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_RESOURCE_INIT_CLASS_INVALID_1 , clazz ) , t ) ; return ; } if ( initClass instanceof I_CmsResourceInit ) { m_resourceInitHandlers . add ( ( I_CmsResourceInit ) initClass ) ; if ( CmsLog . INIT . isInfoEnabled ( ) ) { CmsLog . INIT . info ( Messages . get ( ) . getBundle ( ) . key ( Messages . INIT_RESOURCE_INIT_SUCCESS_1 , clazz ) ) ; } } else { if ( CmsLog . INIT . isErrorEnabled ( ) ) { CmsLog . INIT . error ( Messages . get ( ) . getBundle ( ) . key ( Messages . INIT_RESOURCE_INIT_INVALID_CLASS_1 , clazz ) ) ; } } |
public class IndexTerm { /** * Get the full term , with any prefix .
* @ return full term with prefix */
public String getTermFullName ( ) { } } | if ( termPrefix == null ) { return termName ; } else { if ( termLocale == null ) { return termPrefix . message + STRING_BLANK + termName ; } else { final String key = "IndexTerm." + termPrefix . message . toLowerCase ( ) . trim ( ) . replace ( ' ' , '-' ) ; final String msg = Messages . getString ( key , termLocale ) ; if ( rtlLocaleList . contains ( termLocale . toString ( ) ) ) { return termName + STRING_BLANK + msg ; } else { return msg + STRING_BLANK + termName ; } } } |
public class DateAndTimeBenchmark { /** * Formats a legacy { @ link Date } by using { @ link SimpleDateFormat } .
* @ return Formatted date */
@ Benchmark @ BenchmarkMode ( Mode . Throughput ) public String formatDate ( ) { } } | synchronized ( SIMPLE_DATE_FORMAT ) { return SIMPLE_DATE_FORMAT . format ( DATE ) ; } |
public class TextUtils { /** * Searches the specified array of texts ( { @ code values } ) for the specified text & mdash ; or a fragment , using an
* ( offset , len ) specification & mdash ; using the binary search algorithm .
* Note the specified { @ code values } parameter < strong > must be lexicographically ordered < / strong > .
* @ param caseSensitive whether the comparison must be done in a case - sensitive or case - insensitive way .
* @ param values the array of texts inside which the specified text will be searched .
* Note that it must be < strong > ordered < / strong > .
* @ param valuesOffset the offset to be applied to the texts array so that search only takes part in a fragment
* of it .
* @ param valuesLen the length of the fragment of the texts array in which search will take part .
* @ param text the text to search .
* @ param textOffset the offset of the text to search .
* @ param textLen the length of the text to search .
* @ return index of the search key , if it is contained in the values array ; otherwise ,
* { @ code ( - ( insertion point ) - 1 ) } . The insertion point is defined as the point at
* which the key would be inserted into the array . Note that this guarantees that the return value will
* be & gt ; = 0 if and only if the key is found . */
public static int binarySearch ( final boolean caseSensitive , final char [ ] [ ] values , final int valuesOffset , final int valuesLen , final char [ ] text , final int textOffset , final int textLen ) { } } | if ( values == null ) { throw new IllegalArgumentException ( "Values array cannot be null" ) ; } if ( text == null ) { throw new IllegalArgumentException ( "Text cannot be null" ) ; } int low = valuesOffset ; int high = ( valuesOffset + valuesLen ) - 1 ; int mid , cmp ; char [ ] midVal ; while ( low <= high ) { mid = ( low + high ) >>> 1 ; midVal = values [ mid ] ; cmp = compareTo ( caseSensitive , midVal , 0 , midVal . length , text , textOffset , textLen ) ; if ( cmp < 0 ) { low = mid + 1 ; } else if ( cmp > 0 ) { high = mid - 1 ; } else { // Found ! !
return mid ; } } return - ( low + 1 ) ; // Not Found ! ! We return ( - ( insertion point ) - 1 ) , to guarantee all non - founds are < 0 |
public class Session { /** * Retrieves the result of inserting , updating or deleting a row
* from an updatable result .
* @ return the result of executing the statement */
private Result executeResultUpdate ( Result cmd ) { } } | long id = cmd . getResultId ( ) ; int actionType = cmd . getActionType ( ) ; Result result = sessionData . getDataResult ( id ) ; if ( result == null ) { return Result . newErrorResult ( Error . error ( ErrorCode . X_24501 ) ) ; } Object [ ] pvals = cmd . getParameterData ( ) ; Type [ ] types = cmd . metaData . columnTypes ; StatementQuery statement = ( StatementQuery ) result . getStatement ( ) ; QueryExpression qe = statement . queryExpression ; Table baseTable = qe . getBaseTable ( ) ; int [ ] columnMap = qe . getBaseTableColumnMap ( ) ; sessionContext . rowUpdateStatement . setRowActionProperties ( actionType , baseTable , types , columnMap ) ; Result resultOut = executeCompiledStatement ( sessionContext . rowUpdateStatement , pvals ) ; return resultOut ; |
public class AbstractRunMojo { /** * Get the Tomcat port . By default the port is changed by using the maven . alfresco . tomcat . port property
* but for legacy and external configuration purposes maven . tomcat . port will override if defined
* @ return the Tomcat port */
protected String getPort ( ) { } } | String port = tomcatPort ; if ( mavenTomcatPort != null ) { port = mavenTomcatPort ; getLog ( ) . info ( "Tomcat Port overridden by property maven.tomcat.port" ) ; } return port ; |
public class GregorianMath { /** * / * [ deutsch ]
* < p > Liefert den Tag des Woche f & uuml ; r das angegebene Datum . < / p >
* < p > Diese Methode setzt gem & auml ; & szlig ; dem ISO - 8601 - Standard den
* Montag als ersten Tag der Woche voraus . < / p >
* @ param year proleptic iso year
* @ param month gregorian month ( 1-12)
* @ param dayOfMonth day of month ( 1-31)
* @ return day of week ( monday = 1 , . . . , sunday = 7)
* @ throws IllegalArgumentException if the month or the day are
* out of range */
public static int getDayOfWeek ( int year , int month , int dayOfMonth ) { } } | if ( ( dayOfMonth < 1 ) || ( dayOfMonth > 31 ) ) { throw new IllegalArgumentException ( "Day out of range: " + dayOfMonth ) ; } else if ( dayOfMonth > getLengthOfMonth ( year , month ) ) { throw new IllegalArgumentException ( "Day exceeds month length: " + toString ( year , month , dayOfMonth ) ) ; } int m = gaussianWeekTerm ( month ) ; int y = ( year % 100 ) ; int c = Math . floorDiv ( year , 100 ) ; if ( y < 0 ) { y += 100 ; } if ( month <= 2 ) { // Januar oder Februar
y -- ; if ( y < 0 ) { y = 99 ; c -- ; } } // Gauß ' sche Wochentagsformel
int k = Math . floorDiv ( c , 4 ) ; int w = ( ( dayOfMonth + m + y + ( y / 4 ) + k - 2 * c ) % 7 ) ; if ( w <= 0 ) { w += 7 ; } return w ; |
public class GroovyCollections { /** * Selects the minimum value found in an Iterable of items .
* @ param items an Iterable
* @ return the minimum value
* @ since 2.2.0 */
public static < T > T min ( Iterable < T > items ) { } } | T answer = null ; for ( T value : items ) { if ( value != null ) { if ( answer == null || ScriptBytecodeAdapter . compareLessThan ( value , answer ) ) { answer = value ; } } } return answer ; |
public class JesqueUtils { /** * This is needed because Throwable doesn ' t override equals ( ) and object
* equality is not what we want to test .
* @ param ex1
* first Throwable
* @ param ex2
* second Throwable
* @ return true if the two arguments are equal , as we define it . */
public static boolean equal ( final Throwable ex1 , final Throwable ex2 ) { } } | if ( ex1 == ex2 ) { return true ; } if ( ex1 == null ) { if ( ex2 != null ) { return false ; } } else if ( ex2 == null ) { if ( ex1 != null ) { return false ; } } else { if ( ex1 . getClass ( ) != ex2 . getClass ( ) ) { return false ; } if ( ex1 . getMessage ( ) == null ) { if ( ex2 . getMessage ( ) != null ) { return false ; } } else if ( ! ex1 . getMessage ( ) . equals ( ex2 . getMessage ( ) ) ) { return false ; } if ( ! equal ( ex1 . getCause ( ) , ex2 . getCause ( ) ) ) { return false ; } } return true ; |
public class CompactionAuditCountVerifier { /** * Compare record count between { @ link CompactionAuditCountVerifier # gobblinTier } and { @ link CompactionAuditCountVerifier # referenceTiers } .
* @ param datasetName the name of dataset
* @ param countsByTier the tier - to - count mapping retrieved by { @ link AuditCountClient # fetch ( String , long , long ) }
* @ param referenceTier the tiers we wants to compare against
* @ return If any of ( gobblin / refenence ) > = threshold , return true , else return false */
private Result passed ( String datasetName , Map < String , Long > countsByTier , String referenceTier ) { } } | if ( ! countsByTier . containsKey ( this . gobblinTier ) ) { log . info ( "Missing entry for dataset: " + datasetName + " in gobblin tier: " + this . gobblinTier + "; setting count to 0." ) ; } if ( ! countsByTier . containsKey ( referenceTier ) ) { log . info ( "Missing entry for dataset: " + datasetName + " in reference tier: " + referenceTier + "; setting count to 0." ) ; } long refCount = countsByTier . getOrDefault ( referenceTier , 0L ) ; long gobblinCount = countsByTier . getOrDefault ( this . gobblinTier , 0L ) ; if ( refCount == 0 ) { return new Result ( true , "" ) ; } if ( ( double ) gobblinCount / ( double ) refCount < this . threshold ) { return new Result ( false , String . format ( "%s failed for %s : gobblin count = %d, %s count = %d (%f < threshold %f)" , this . getName ( ) , datasetName , gobblinCount , referenceTier , refCount , ( double ) gobblinCount / ( double ) refCount , this . threshold ) ) ; } return new Result ( true , "" ) ; |
public class DefaultMongoDBDataHandler { /** * Gets the lob from GFS entity .
* @ param gfs
* the gfs
* @ param m
* the m
* @ param entity
* the entity
* @ param kunderaMetadata
* the kundera metadata
* @ return the lob from gfs entity */
public Object getLobFromGFSEntity ( GridFS gfs , EntityMetadata m , Object entity , KunderaMetadata kunderaMetadata ) { } } | MetamodelImpl metaModel = ( MetamodelImpl ) kunderaMetadata . getApplicationMetadata ( ) . getMetamodel ( m . getPersistenceUnit ( ) ) ; EntityType entityType = metaModel . entity ( m . getEntityClazz ( ) ) ; Set < Attribute > columns = entityType . getAttributes ( ) ; for ( Attribute column : columns ) { boolean isLob = ( ( Field ) column . getJavaMember ( ) ) . getAnnotation ( Lob . class ) != null ; if ( isLob ) { return PropertyAccessorHelper . getObject ( entity , ( Field ) column . getJavaMember ( ) ) ; } } return null ; |
public class JsonUtils { /** * Utility for test classes , so that they can inline json in a test class .
* Does a character level replacement of apostrophe ( ' ) with double quote ( " ) .
* This means you can express a snippit of JSON without having to forward
* slash escape everything .
* This is character based , so don ' t have any apostrophes ( ' ) in your test
* data .
* @ param javason JSON - ish string you want to turn into Maps - of - Maps
* @ return Maps - of - Maps */
public static Map < String , Object > javason ( String javason ) { } } | String json = javason . replace ( '\'' , '"' ) ; return jsonToMap ( new ByteArrayInputStream ( json . getBytes ( ) ) ) ; |
public class JobDataMarshaller { /** * Marshall the given parameter object . */
public void marshall ( JobData jobData , ProtocolMarshaller protocolMarshaller ) { } } | if ( jobData == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( jobData . getActionTypeId ( ) , ACTIONTYPEID_BINDING ) ; protocolMarshaller . marshall ( jobData . getActionConfiguration ( ) , ACTIONCONFIGURATION_BINDING ) ; protocolMarshaller . marshall ( jobData . getPipelineContext ( ) , PIPELINECONTEXT_BINDING ) ; protocolMarshaller . marshall ( jobData . getInputArtifacts ( ) , INPUTARTIFACTS_BINDING ) ; protocolMarshaller . marshall ( jobData . getOutputArtifacts ( ) , OUTPUTARTIFACTS_BINDING ) ; protocolMarshaller . marshall ( jobData . getArtifactCredentials ( ) , ARTIFACTCREDENTIALS_BINDING ) ; protocolMarshaller . marshall ( jobData . getContinuationToken ( ) , CONTINUATIONTOKEN_BINDING ) ; protocolMarshaller . marshall ( jobData . getEncryptionKey ( ) , ENCRYPTIONKEY_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class JobWorkerIdRegistry { /** * Registers with { @ link JobMaster } to get a new job worker id .
* @ param jobMasterClient the job master client to be used for RPC
* @ param workerAddress current worker address
* @ throws IOException when fails to get a new worker id
* @ throws ConnectionFailedException if network connection failed */
public static void registerWorker ( JobMasterClient jobMasterClient , WorkerNetAddress workerAddress ) throws IOException , ConnectionFailedException { } } | sWorkerId . set ( jobMasterClient . registerWorker ( workerAddress ) ) ; |
public class AOStream { /** * Helper method .
* Called from withing a synchronized ( this ) block .
* @ param discriminators
* @ param selectorDomains
* @ param selectors
* @ return */
private final JSRemoteConsumerPoint findOrCreateJSRemoteConsumerPoint ( String [ ] discriminators , int [ ] selectorDomains , String [ ] selectors ) throws ClosedException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "findOrCreateJSRemoteConsumerPoint" , new Object [ ] { Arrays . toString ( discriminators ) , Arrays . toString ( selectorDomains ) , Arrays . toString ( selectors ) } ) ; String selectionCriteriasAsString = parent . convertSelectionCriteriasToString ( discriminators , selectorDomains , selectors ) ; JSRemoteConsumerPoint aock = ( JSRemoteConsumerPoint ) consumerKeyTable . get ( selectionCriteriasAsString ) ; if ( aock == null ) { try { // create an JSRemoteConsumerPoint
aock = new JSRemoteConsumerPoint ( ) ; SelectionCriteria [ ] criterias = new SelectionCriteria [ discriminators . length ] ; ConsumableKey [ ] consumerKeys = new ConsumableKey [ discriminators . length ] ; OrderingContextImpl ocontext = null ; SIBUuid12 connectionUuid = new SIBUuid12 ( ) ; if ( discriminators . length > 1 ) ocontext = new OrderingContextImpl ( ) ; // create a new ordering context
for ( int i = 0 ; i < discriminators . length ; i ++ ) { SelectorDomain domain = SelectorDomain . getSelectorDomain ( selectorDomains [ i ] ) ; criterias [ i ] = parent . createSelectionCriteria ( discriminators [ i ] , selectors [ i ] , domain ) ; // attach as many times as necessary
consumerKeys [ i ] = ( ConsumableKey ) consumerDispatcher . attachConsumerPoint ( aock , criterias [ i ] , connectionUuid , false , false , null ) ; if ( ocontext != null ) consumerDispatcher . joinKeyGroup ( consumerKeys [ i ] , ocontext ) ; consumerKeys [ i ] . start ( ) ; // in case we use a ConsumerKeyGroup , this is essential
} if ( parent . getCardinalityOne ( ) || consumerDispatcher . isPubSub ( ) ) { // effectively infinite timeout , since don ' t want to close the ConsumerKey if RME is inactive for
// a while . Only close this ConsumerKey when start flushing this stream .
// NOTE shared durable subs might not be cardinality one but we still do not want the streams
// to flush on timeout
// Defect 516583 , set the idleTimeout parameter to 0 , not to Long . MAX _ VALUE in order to have an
// " infinite timeout " .
aock . init ( this , selectionCriteriasAsString , consumerKeys , 0 , am , criterias ) ; } else { aock . init ( this , selectionCriteriasAsString , consumerKeys , mp . getCustomProperties ( ) . get_ck_idle_timeout ( ) , am , criterias ) ; } consumerKeyTable . put ( selectionCriteriasAsString , aock ) ; } catch ( Exception e ) { // should not occur !
FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.AOStream.findOrCreateJSRemoteConsumerPoint" , "1:2942:1.80.3.24" , this ) ; SibTr . exception ( tc , e ) ; aock = null ; ClosedException e2 = new ClosedException ( e . getMessage ( ) ) ; // just using the ClosedException as a convenience
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "findOrCreateJSRemoteConsumerPoint" , e2 ) ; throw e2 ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "findOrCreateJSRemoteConsumerPoint" , aock ) ; return aock ; |
public class GroovyResultSetExtension { /** * Supports integer based subscript operators for updating the values of numbered columns
* starting at zero . Negative indices are supported , they will count from the last column backwards .
* @ param index is the number of the column to look at starting at 1
* @ param newValue the updated value
* @ throws java . sql . SQLException if something goes wrong
* @ see ResultSet # updateObject ( java . lang . String , java . lang . Object ) */
public void putAt ( int index , Object newValue ) throws SQLException { } } | index = normalizeIndex ( index ) ; getResultSet ( ) . updateObject ( index , newValue ) ; |
public class RequestType { /** * Get the table name . */
public String getTableNames ( boolean bAddQuotes ) { } } | return ( m_tableName == null ) ? Record . formatTableNames ( REQUEST_TYPE_FILE , bAddQuotes ) : super . getTableNames ( bAddQuotes ) ; |
public class SampleSummaryStatistics { /** * Return a { @ code Collector } which applies an double - producing mapping
* function to each input element , and returns moments - statistics for the
* resulting values .
* < pre > { @ code
* final Stream < Sample > stream = . . .
* final SampleSummaryStatistics statistics = stream
* . collect ( toDoubleMomentStatistics ( parameterCount ) ) ;
* } < / pre >
* @ param parameterCount the number of parameter of the accumulated
* { @ code Sample } objects
* @ return a { @ code Collector } implementing the sample reduction
* @ throws IllegalArgumentException if the given { @ code parameterCount }
* is smaller then one */
public static Collector < Sample , ? , SampleSummaryStatistics > toSampleStatistics ( final int parameterCount ) { } } | require . positive ( parameterCount ) ; return Collector . of ( ( ) -> new SampleSummaryStatistics ( parameterCount ) , SampleSummaryStatistics :: accept , SampleSummaryStatistics :: combine ) ; |
public class CryptoKey { /** * < code > . google . privacy . dlp . v2 . TransientCryptoKey transient = 1 ; < / code > */
public com . google . privacy . dlp . v2 . TransientCryptoKeyOrBuilder getTransientOrBuilder ( ) { } } | if ( sourceCase_ == 1 ) { return ( com . google . privacy . dlp . v2 . TransientCryptoKey ) source_ ; } return com . google . privacy . dlp . v2 . TransientCryptoKey . getDefaultInstance ( ) ; |
public class BitVector { /** * 1的数量
* @ param unit
* @ return */
private static int popCount ( int unit ) { } } | unit = ( ( unit & 0xAAAAAAAA ) >>> 1 ) + ( unit & 0x55555555 ) ; unit = ( ( unit & 0xCCCCCCCC ) >>> 2 ) + ( unit & 0x33333333 ) ; unit = ( ( unit >>> 4 ) + unit ) & 0x0F0F0F0F ; unit += unit >>> 8 ; unit += unit >>> 16 ; return unit & 0xFF ; |
public class ClassUtils { /** * Checks if given class is present .
* @ param className
* @ return */
static boolean isClassPresent ( String className ) { } } | try { ClassUtils . class . getClassLoader ( ) . loadClass ( className ) ; return true ; } catch ( Throwable e ) { return false ; } |
public class ColorHelper { /** * Method to transform from HSV to RGB ( this method sets alpha as 0xff )
* @ param h - hue
* @ param s - saturation
* @ param v - brightness
* @ return rgb color */
public static int fromHSV ( float h , float s , float v ) { } } | int a = MAX_INT ; return fromHSV ( h , s , v , a ) ; |
public class PrintUtilities { /** * Print data from a matrix .
* @ param matrix
* the matrix . */
public static void printMatrixData ( double [ ] [ ] matrix ) { } } | int cols = matrix [ 0 ] . length ; int rows = matrix . length ; for ( int r = 0 ; r < rows ; r ++ ) { for ( int c = 0 ; c < cols ; c ++ ) { printer . print ( matrix [ r ] [ c ] ) ; printer . print ( separator ) ; } printer . println ( ) ; } |
public class UpdateDataSourceRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( UpdateDataSourceRequest updateDataSourceRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( updateDataSourceRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateDataSourceRequest . getDataSourceId ( ) , DATASOURCEID_BINDING ) ; protocolMarshaller . marshall ( updateDataSourceRequest . getDataSourceName ( ) , DATASOURCENAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class RangeToken { /** * for RANGE or NRANGE */
protected void addRange ( int start , int end ) { } } | this . icaseCache = null ; // System . err . println ( " Token # addRange ( ) : " + start + " " + end ) ;
int r1 , r2 ; if ( start <= end ) { r1 = start ; r2 = end ; } else { r1 = end ; r2 = start ; } int pos = 0 ; if ( this . ranges == null ) { this . ranges = new int [ 2 ] ; this . ranges [ 0 ] = r1 ; this . ranges [ 1 ] = r2 ; this . setSorted ( true ) ; } else { pos = this . ranges . length ; if ( this . ranges [ pos - 1 ] + 1 == r1 ) { this . ranges [ pos - 1 ] = r2 ; return ; } int [ ] temp = new int [ pos + 2 ] ; System . arraycopy ( this . ranges , 0 , temp , 0 , pos ) ; this . ranges = temp ; if ( this . ranges [ pos - 1 ] >= r1 ) this . setSorted ( false ) ; this . ranges [ pos ++ ] = r1 ; this . ranges [ pos ] = r2 ; if ( ! this . sorted ) this . sortRanges ( ) ; } |
public class JobsInner { /** * Retrieve the job output identified by job id .
* @ param resourceGroupName Name of an Azure Resource group .
* @ param automationAccountName The name of the automation account .
* @ param jobId The job id .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws ErrorResponseException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the InputStream object if successful . */
public InputStream getOutput ( String resourceGroupName , String automationAccountName , String jobId ) { } } | return getOutputWithServiceResponseAsync ( resourceGroupName , automationAccountName , jobId ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class ConfigUtil { /** * Tries to discover user proxy location .
* If a UID system property is set , and running on a Unix machine it
* returns / tmp / x509up _ u $ { UID } . If any other machine then Unix , it returns
* $ { tempdir } / x509up _ u $ { UID } , where tempdir is a platform - specific
* temporary directory as indicated by the java . io . tmpdir system property .
* If a UID system property is not set , the username will be used instead
* of the UID . That is , it returns $ { tempdir } / x509up _ u _ $ { username } */
public static String discoverProxyLocation ( ) { } } | String dir = null ; if ( getOS ( ) == UNIX_OS ) { dir = "/tmp/" ; } else { String tmpDir = System . getProperty ( "java.io.tmpdir" ) ; dir = ( tmpDir == null ) ? globus_dir : tmpDir ; } String uid = System . getProperty ( "UID" ) ; if ( uid != null ) { return getLocation ( dir , PROXY_NAME + uid ) ; } else if ( getOS ( ) == UNIX_OS ) { try { return getLocation ( dir , PROXY_NAME + getUID ( ) ) ; } catch ( IOException e ) { } } /* If all else fails use username */
String suffix = System . getProperty ( "user.name" ) ; if ( suffix != null ) { suffix = suffix . toLowerCase ( ) ; } else { suffix = "nousername" ; } return getLocation ( dir , PROXY_NAME + "_" + suffix ) ; |
public class AssertMessages { /** * Parameter A must be lower than or equal to Parameter B .
* @ param aindex the index of the parameter A .
* @ param avalue the value of the parameter A .
* @ param bindex the index of the parameter B .
* @ param bvalue the value of the parameter B .
* @ return the error message . */
@ Pure public static String lowerEqualParameters ( int aindex , Object avalue , int bindex , Object bvalue ) { } } | return msg ( "A3" , aindex , avalue , bindex , bvalue ) ; // $ NON - NLS - 1 $ |
public class ExceptionQueuedEventContext { /** * < p class = " changed _ added _ 2_0 " > Return a < code > List < / code > that
* contains a single entry , the { @ link
* javax . faces . context . ExceptionHandler } for the current
* request . < / p > */
public List < SystemEventListener > getListenersForEventClass ( Class < ? extends SystemEvent > facesEventClass ) { } } | if ( null == listener ) { List < SystemEventListener > list = new ArrayList < SystemEventListener > ( 1 ) ; list . add ( context . getExceptionHandler ( ) ) ; listener = Collections . unmodifiableList ( list ) ; } return listener ; |
public class MediaTypeUtils { /** * Returns true if the given extension is considered " safe " ; e . g . is not associated with
* executable or scripting files
* @ param extension
* file extension
* @ return true if given extension is not associated with executable code */
public static boolean isSafeExtension ( final String extension ) { } } | return extension != null && VALID_EXTENSION . matcher ( extension ) . matches ( ) && ! DANGEROUS_EXTENSIONS . contains ( extension ) ; |
public class ChannelzService { /** * Returns a top level channel aka { @ link io . grpc . ManagedChannel } . */
@ Override public void getChannel ( GetChannelRequest request , StreamObserver < GetChannelResponse > responseObserver ) { } } | InternalInstrumented < ChannelStats > s = channelz . getRootChannel ( request . getChannelId ( ) ) ; if ( s == null ) { responseObserver . onError ( Status . NOT_FOUND . withDescription ( "Can't find channel " + request . getChannelId ( ) ) . asRuntimeException ( ) ) ; return ; } GetChannelResponse resp ; try { resp = GetChannelResponse . newBuilder ( ) . setChannel ( ChannelzProtoUtil . toChannel ( s ) ) . build ( ) ; } catch ( StatusRuntimeException e ) { responseObserver . onError ( e ) ; return ; } responseObserver . onNext ( resp ) ; responseObserver . onCompleted ( ) ; |
public class TimePickerDialog { /** * Create a tree for deciding what keys can legally be typed . */
private void generateLegalTimesTree ( ) { } } | // Create a quick cache of numbers to their keycodes .
int k0 = KeyEvent . KEYCODE_0 ; int k1 = KeyEvent . KEYCODE_1 ; int k2 = KeyEvent . KEYCODE_2 ; int k3 = KeyEvent . KEYCODE_3 ; int k4 = KeyEvent . KEYCODE_4 ; int k5 = KeyEvent . KEYCODE_5 ; int k6 = KeyEvent . KEYCODE_6 ; int k7 = KeyEvent . KEYCODE_7 ; int k8 = KeyEvent . KEYCODE_8 ; int k9 = KeyEvent . KEYCODE_9 ; // The root of the tree doesn ' t contain any numbers .
mLegalTimesTree = new Node ( ) ; if ( mIs24HourMode ) { // We ' ll be re - using these nodes , so we ' ll save them .
Node minuteFirstDigit = new Node ( k0 , k1 , k2 , k3 , k4 , k5 ) ; Node minuteSecondDigit = new Node ( k0 , k1 , k2 , k3 , k4 , k5 , k6 , k7 , k8 , k9 ) ; // The first digit must be followed by the second digit .
minuteFirstDigit . addChild ( minuteSecondDigit ) ; // The first digit may be 0-1.
Node firstDigit = new Node ( k0 , k1 ) ; mLegalTimesTree . addChild ( firstDigit ) ; // When the first digit is 0-1 , the second digit may be 0-5.
Node secondDigit = new Node ( k0 , k1 , k2 , k3 , k4 , k5 ) ; firstDigit . addChild ( secondDigit ) ; // We may now be followed by the first minute digit . E . g . 00:09 , 15:58.
secondDigit . addChild ( minuteFirstDigit ) ; // When the first digit is 0-1 , and the second digit is 0-5 , the third digit may be 6-9.
Node thirdDigit = new Node ( k6 , k7 , k8 , k9 ) ; // The time must now be finished . E . g . 0:55 , 1:08.
secondDigit . addChild ( thirdDigit ) ; // When the first digit is 0-1 , the second digit may be 6-9.
secondDigit = new Node ( k6 , k7 , k8 , k9 ) ; firstDigit . addChild ( secondDigit ) ; // We must now be followed by the first minute digit . E . g . 06:50 , 18:20.
secondDigit . addChild ( minuteFirstDigit ) ; // The first digit may be 2.
firstDigit = new Node ( k2 ) ; mLegalTimesTree . addChild ( firstDigit ) ; // When the first digit is 2 , the second digit may be 0-3.
secondDigit = new Node ( k0 , k1 , k2 , k3 ) ; firstDigit . addChild ( secondDigit ) ; // We must now be followed by the first minute digit . E . g . 20:50 , 23:09.
secondDigit . addChild ( minuteFirstDigit ) ; // When the first digit is 2 , the second digit may be 4-5.
secondDigit = new Node ( k4 , k5 ) ; firstDigit . addChild ( secondDigit ) ; // We must now be followd by the last minute digit . E . g . 2:40 , 2:53.
secondDigit . addChild ( minuteSecondDigit ) ; // The first digit may be 3-9.
firstDigit = new Node ( k3 , k4 , k5 , k6 , k7 , k8 , k9 ) ; mLegalTimesTree . addChild ( firstDigit ) ; // We must now be followed by the first minute digit . E . g . 3:57 , 8:12.
firstDigit . addChild ( minuteFirstDigit ) ; } else { // We ' ll need to use the AM / PM node a lot .
// Set up AM and PM to respond to " a " and " p " .
Node ampm = new Node ( getAmOrPmKeyCode ( AM ) , getAmOrPmKeyCode ( PM ) ) ; // The first hour digit may be 1.
Node firstDigit = new Node ( k1 ) ; mLegalTimesTree . addChild ( firstDigit ) ; // We ' ll allow quick input of on - the - hour times . E . g . 1pm .
firstDigit . addChild ( ampm ) ; // When the first digit is 1 , the second digit may be 0-2.
Node secondDigit = new Node ( k0 , k1 , k2 ) ; firstDigit . addChild ( secondDigit ) ; // Also for quick input of on - the - hour times . E . g . 10pm , 12am .
secondDigit . addChild ( ampm ) ; // When the first digit is 1 , and the second digit is 0-2 , the third digit may be 0-5.
Node thirdDigit = new Node ( k0 , k1 , k2 , k3 , k4 , k5 ) ; secondDigit . addChild ( thirdDigit ) ; // The time may be finished now . E . g . 1:02pm , 1:25am .
thirdDigit . addChild ( ampm ) ; // When the first digit is 1 , the second digit is 0-2 , and the third digit is 0-5,
// the fourth digit may be 0-9.
Node fourthDigit = new Node ( k0 , k1 , k2 , k3 , k4 , k5 , k6 , k7 , k8 , k9 ) ; thirdDigit . addChild ( fourthDigit ) ; // The time must be finished now . E . g . 10:49am , 12:40pm .
fourthDigit . addChild ( ampm ) ; // When the first digit is 1 , and the second digit is 0-2 , the third digit may be 6-9.
thirdDigit = new Node ( k6 , k7 , k8 , k9 ) ; secondDigit . addChild ( thirdDigit ) ; // The time must be finished now . E . g . 1:08am , 1:26pm .
thirdDigit . addChild ( ampm ) ; // When the first digit is 1 , the second digit may be 3-5.
secondDigit = new Node ( k3 , k4 , k5 ) ; firstDigit . addChild ( secondDigit ) ; // When the first digit is 1 , and the second digit is 3-5 , the third digit may be 0-9.
thirdDigit = new Node ( k0 , k1 , k2 , k3 , k4 , k5 , k6 , k7 , k8 , k9 ) ; secondDigit . addChild ( thirdDigit ) ; // The time must be finished now . E . g . 1:39am , 1:50pm .
thirdDigit . addChild ( ampm ) ; // The hour digit may be 2-9.
firstDigit = new Node ( k2 , k3 , k4 , k5 , k6 , k7 , k8 , k9 ) ; mLegalTimesTree . addChild ( firstDigit ) ; // We ' ll allow quick input of on - the - hour - times . E . g . 2am , 5pm .
firstDigit . addChild ( ampm ) ; // When the first digit is 2-9 , the second digit may be 0-5.
secondDigit = new Node ( k0 , k1 , k2 , k3 , k4 , k5 ) ; firstDigit . addChild ( secondDigit ) ; // When the first digit is 2-9 , and the second digit is 0-5 , the third digit may be 0-9.
thirdDigit = new Node ( k0 , k1 , k2 , k3 , k4 , k5 , k6 , k7 , k8 , k9 ) ; secondDigit . addChild ( thirdDigit ) ; // The time must be finished now . E . g . 2:57am , 9:30pm .
thirdDigit . addChild ( ampm ) ; } |
public class ICalendar { /** * Sets the type of < a href = " http : / / tools . ietf . org / html / rfc5546 " > iTIP < / a >
* request that this iCalendar object represents , or the value of the
* " Content - Type " header ' s " method " parameter if the iCalendar object is
* defined as a MIME message entity .
* @ param method the method or null to remove
* @ return the property that was created
* @ see < a href = " http : / / tools . ietf . org / html / rfc5546 " > RFC 5546 < / a >
* @ see < a href = " http : / / tools . ietf . org / html / rfc5545 # page - 77 " > RFC 5545
* p . 77-8 < / a >
* @ see < a href = " http : / / tools . ietf . org / html / rfc2445 # page - 74 " > RFC 2445
* p . 74-5 < / a > */
public Method setMethod ( String method ) { } } | Method property = ( method == null ) ? null : new Method ( method ) ; setMethod ( property ) ; return property ; |
public class EmbedBuilder { /** * Sets the image of the embed .
* @ param image The image .
* @ param fileType The type of the file , e . g . " png " or " gif " .
* @ return The current instance in order to chain call methods . */
public EmbedBuilder setImage ( BufferedImage image , String fileType ) { } } | delegate . setImage ( image , fileType ) ; return this ; |
public class ImmutableValueMap { /** * Returns an immutable map containing the same entries as { @ code map } . If { @ code map } somehow contains entries with
* duplicate keys ( for example , if
* it is a { @ code SortedMap } whose comparator is not < i > consistent with
* equals < / i > ) , the results of this method are undefined .
* Despite the method name , this method attempts to avoid actually copying the data when it is safe to do so . The
* exact circumstances under which a copy will or will not be performed are undocumented and subject to change .
* @ param map Map
* @ return ImmutableValueMap
* @ throws NullPointerException if any key or value in { @ code map } is null */
public static @ NotNull ImmutableValueMap copyOf ( @ NotNull Map < String , Object > map ) { } } | if ( map instanceof ValueMap ) { return new ImmutableValueMap ( ( ValueMap ) map ) ; } else { return new ImmutableValueMap ( map ) ; } |
public class ApprovalDAO { /** * Log Modification statements to History
* @ param modified which CRUD action was done
* @ param data entity data that needs a log entry
* @ param overrideMessage if this is specified , we use it rather than crafting a history message based on data */
@ Override protected void wasModified ( AuthzTrans trans , CRUD modified , Data data , String ... override ) { } } | boolean memo = override . length > 0 && override [ 0 ] != null ; boolean subject = override . length > 1 && override [ 1 ] != null ; HistoryDAO . Data hd = HistoryDAO . newInitedData ( ) ; hd . user = trans . user ( ) ; hd . action = modified . name ( ) ; hd . target = TABLE ; hd . subject = subject ? override [ 1 ] : data . user + "|" + data . approver ; hd . memo = memo ? String . format ( "%s by %s" , override [ 0 ] , hd . user ) : ( modified . name ( ) + "d approval for " + data . user ) ; // Detail ?
// Reconstruct ?
if ( historyDAO . create ( trans , hd ) . status != Status . OK ) { trans . error ( ) . log ( "Cannot log to History" ) ; } |
public class LocalDirAllocator { /** * Get a path from the local FS . Pass size as - 1 if not known apriori . We
* round - robin over the set of disks ( via the configured dirs ) and return
* the first complete path which has enough space
* @ param pathStr the requested path ( this will be created on the first
* available disk )
* @ param size the size of the file that is going to be written
* @ param conf the Configuration object
* @ return the complete path to the file on a local disk
* @ throws IOException */
public Path getLocalPathForWrite ( String pathStr , long size , Configuration conf ) throws IOException { } } | AllocatorPerContext context = obtainContext ( contextCfgItemName ) ; return context . getLocalPathForWrite ( pathStr , size , conf ) ; |
public class ScatterGatherIteration { /** * Creates the operator that represents this scatter - gather graph computation for a vertex with in
* and out degrees added to the vertex value .
* @ param graph
* @ param messagingDirection
* @ param messageTypeInfo
* @ param numberOfVertices
* @ return the operator */
@ SuppressWarnings ( "serial" ) private DataSet < Vertex < K , VV > > createResultVerticesWithDegrees ( Graph < K , VV , EV > graph , EdgeDirection messagingDirection , TypeInformation < Tuple2 < K , Message > > messageTypeInfo , DataSet < LongValue > numberOfVertices ) { } } | DataSet < Tuple2 < K , Message > > messages ; this . gatherFunction . setOptDegrees ( this . configuration . isOptDegrees ( ) ) ; DataSet < Tuple2 < K , LongValue > > inDegrees = graph . inDegrees ( ) ; DataSet < Tuple2 < K , LongValue > > outDegrees = graph . outDegrees ( ) ; DataSet < Tuple3 < K , LongValue , LongValue > > degrees = inDegrees . join ( outDegrees ) . where ( 0 ) . equalTo ( 0 ) . with ( new FlatJoinFunction < Tuple2 < K , LongValue > , Tuple2 < K , LongValue > , Tuple3 < K , LongValue , LongValue > > ( ) { @ Override public void join ( Tuple2 < K , LongValue > first , Tuple2 < K , LongValue > second , Collector < Tuple3 < K , LongValue , LongValue > > out ) { out . collect ( new Tuple3 < > ( first . f0 , first . f1 , second . f1 ) ) ; } } ) . withForwardedFieldsFirst ( "f0;f1" ) . withForwardedFieldsSecond ( "f1" ) ; DataSet < Vertex < K , Tuple3 < VV , LongValue , LongValue > > > verticesWithDegrees = initialVertices . join ( degrees ) . where ( 0 ) . equalTo ( 0 ) . with ( new FlatJoinFunction < Vertex < K , VV > , Tuple3 < K , LongValue , LongValue > , Vertex < K , Tuple3 < VV , LongValue , LongValue > > > ( ) { @ Override public void join ( Vertex < K , VV > vertex , Tuple3 < K , LongValue , LongValue > degrees , Collector < Vertex < K , Tuple3 < VV , LongValue , LongValue > > > out ) throws Exception { out . collect ( new Vertex < > ( vertex . getId ( ) , new Tuple3 < > ( vertex . getValue ( ) , degrees . f1 , degrees . f2 ) ) ) ; } } ) . withForwardedFieldsFirst ( "f0" ) ; // add type info
TypeInformation < Vertex < K , Tuple3 < VV , LongValue , LongValue > > > vertexTypes = verticesWithDegrees . getType ( ) ; final DeltaIteration < Vertex < K , Tuple3 < VV , LongValue , LongValue > > , Vertex < K , Tuple3 < VV , LongValue , LongValue > > > iteration = verticesWithDegrees . iterateDelta ( verticesWithDegrees , this . maximumNumberOfIterations , 0 ) ; setUpIteration ( iteration ) ; switch ( messagingDirection ) { case IN : messages = buildScatterFunctionVerticesWithDegrees ( iteration , messageTypeInfo , 1 , 0 , numberOfVertices ) ; break ; case OUT : messages = buildScatterFunctionVerticesWithDegrees ( iteration , messageTypeInfo , 0 , 0 , numberOfVertices ) ; break ; case ALL : messages = buildScatterFunctionVerticesWithDegrees ( iteration , messageTypeInfo , 1 , 0 , numberOfVertices ) . union ( buildScatterFunctionVerticesWithDegrees ( iteration , messageTypeInfo , 0 , 0 , numberOfVertices ) ) ; break ; default : throw new IllegalArgumentException ( "Illegal edge direction" ) ; } @ SuppressWarnings ( { "unchecked" , "rawtypes" } ) GatherUdf < K , Tuple3 < VV , LongValue , LongValue > , Message > updateUdf = new GatherUdfVVWithDegrees ( gatherFunction , vertexTypes ) ; // build the update function ( co group )
CoGroupOperator < ? , ? , Vertex < K , Tuple3 < VV , LongValue , LongValue > > > updates = messages . coGroup ( iteration . getSolutionSet ( ) ) . where ( 0 ) . equalTo ( 0 ) . with ( updateUdf ) ; if ( this . configuration != null && this . configuration . isOptNumVertices ( ) ) { updates = updates . withBroadcastSet ( numberOfVertices , "number of vertices" ) ; } configureUpdateFunction ( updates ) ; return iteration . closeWith ( updates , updates ) . map ( new MapFunction < Vertex < K , Tuple3 < VV , LongValue , LongValue > > , Vertex < K , VV > > ( ) { public Vertex < K , VV > map ( Vertex < K , Tuple3 < VV , LongValue , LongValue > > vertex ) { return new Vertex < > ( vertex . getId ( ) , vertex . getValue ( ) . f0 ) ; } } ) ; |
public class systembackup { /** * Use this API to create systembackup resources . */
public static base_responses create ( nitro_service client , systembackup resources [ ] ) throws Exception { } } | base_responses result = null ; if ( resources != null && resources . length > 0 ) { systembackup createresources [ ] = new systembackup [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { createresources [ i ] = new systembackup ( ) ; createresources [ i ] . filename = resources [ i ] . filename ; createresources [ i ] . level = resources [ i ] . level ; createresources [ i ] . comment = resources [ i ] . comment ; } result = perform_operation_bulk_request ( client , createresources , "create" ) ; } return result ; |
public class FlinkAggregateExpandDistinctAggregatesRule { /** * Converts all distinct aggregate calls to a given set of arguments .
* < p > This method is called several times , one for each set of arguments .
* Each time it is called , it generates a JOIN to a new SELECT DISTINCT
* relational expression , and modifies the set of top - level calls .
* @ param aggregate Original aggregate
* @ param n Ordinal of this in a join . { @ code relBuilder } contains the
* input relational expression ( either the original
* aggregate , the output from the previous call to this
* method . { @ code n } is 0 if we ' re converting the
* first distinct aggregate in a query with no non - distinct
* aggregates )
* @ param argList Arguments to the distinct aggregate function
* @ param filterArg Argument that filters input to aggregate function , or - 1
* @ param refs Array of expressions which will be the projected by the
* result of this rule . Those relating to this arg list will
* be modified @ return Relational expression */
private void doRewrite ( RelBuilder relBuilder , Aggregate aggregate , int n , List < Integer > argList , int filterArg , List < RexInputRef > refs ) { } } | final RexBuilder rexBuilder = aggregate . getCluster ( ) . getRexBuilder ( ) ; final List < RelDataTypeField > leftFields ; if ( n == 0 ) { leftFields = null ; } else { leftFields = relBuilder . peek ( ) . getRowType ( ) . getFieldList ( ) ; } // Aggregate (
// child ,
// { COUNT ( DISTINCT 1 ) , SUM ( DISTINCT 1 ) , SUM ( 2 ) } )
// becomes
// Aggregate (
// Join (
// child ,
// Aggregate ( child , < all columns > { } ) ,
// INNER ,
// < f2 = f5 > ) )
// E . g .
// SELECT deptno , SUM ( DISTINCT sal ) , COUNT ( DISTINCT gender ) , MAX ( age )
// FROM Emps
// GROUP BY deptno
// becomes
// SELECT e . deptno , adsal . sum _ sal , adgender . count _ gender , e . max _ age
// FROM (
// SELECT deptno , MAX ( age ) as max _ age
// FROM Emps GROUP BY deptno ) AS e
// JOIN (
// SELECT deptno , COUNT ( gender ) AS count _ gender FROM (
// SELECT DISTINCT deptno , gender FROM Emps ) AS dgender
// GROUP BY deptno ) AS adgender
// ON e . deptno = adgender . deptno
// JOIN (
// SELECT deptno , SUM ( sal ) AS sum _ sal FROM (
// SELECT DISTINCT deptno , sal FROM Emps ) AS dsal
// GROUP BY deptno ) AS adsal
// ON e . deptno = adsal . deptno
// GROUP BY e . deptno
// Note that if a query contains no non - distinct aggregates , then the
// very first join / group by is omitted . In the example above , if
// MAX ( age ) is removed , then the sub - select of " e " is not needed , and
// instead the two other group by ' s are joined to one another .
// Project the columns of the GROUP BY plus the arguments
// to the agg function .
final Map < Integer , Integer > sourceOf = new HashMap < > ( ) ; createSelectDistinct ( relBuilder , aggregate , argList , filterArg , sourceOf ) ; // Now compute the aggregate functions on top of the distinct dataset .
// Each distinct agg becomes a non - distinct call to the corresponding
// field from the right ; for example ,
// " COUNT ( DISTINCT e . sal ) "
// becomes
// " COUNT ( distinct _ e . sal ) " .
final List < AggregateCall > aggCallList = new ArrayList < > ( ) ; final List < AggregateCall > aggCalls = aggregate . getAggCallList ( ) ; final int groupAndIndicatorCount = aggregate . getGroupCount ( ) + aggregate . getIndicatorCount ( ) ; int i = groupAndIndicatorCount - 1 ; for ( AggregateCall aggCall : aggCalls ) { ++ i ; // Ignore agg calls which are not distinct or have the wrong set
// arguments . If we ' re rewriting aggs whose args are { sal } , we will
// rewrite COUNT ( DISTINCT sal ) and SUM ( DISTINCT sal ) but ignore
// COUNT ( DISTINCT gender ) or SUM ( sal ) .
if ( ! aggCall . isDistinct ( ) ) { continue ; } if ( ! aggCall . getArgList ( ) . equals ( argList ) ) { continue ; } // Re - map arguments .
final int argCount = aggCall . getArgList ( ) . size ( ) ; final List < Integer > newArgs = new ArrayList < > ( argCount ) ; for ( int j = 0 ; j < argCount ; j ++ ) { final Integer arg = aggCall . getArgList ( ) . get ( j ) ; newArgs . add ( sourceOf . get ( arg ) ) ; } final int newFilterArg = aggCall . filterArg >= 0 ? sourceOf . get ( aggCall . filterArg ) : - 1 ; final AggregateCall newAggCall = AggregateCall . create ( aggCall . getAggregation ( ) , false , aggCall . isApproximate ( ) , newArgs , newFilterArg , aggCall . getType ( ) , aggCall . getName ( ) ) ; assert refs . get ( i ) == null ; if ( n == 0 ) { refs . set ( i , new RexInputRef ( groupAndIndicatorCount + aggCallList . size ( ) , newAggCall . getType ( ) ) ) ; } else { refs . set ( i , new RexInputRef ( leftFields . size ( ) + groupAndIndicatorCount + aggCallList . size ( ) , newAggCall . getType ( ) ) ) ; } aggCallList . add ( newAggCall ) ; } final Map < Integer , Integer > map = new HashMap < > ( ) ; for ( Integer key : aggregate . getGroupSet ( ) ) { map . put ( key , map . size ( ) ) ; } final ImmutableBitSet newGroupSet = aggregate . getGroupSet ( ) . permute ( map ) ; assert newGroupSet . equals ( ImmutableBitSet . range ( aggregate . getGroupSet ( ) . cardinality ( ) ) ) ; com . google . common . collect . ImmutableList < ImmutableBitSet > newGroupingSets = null ; if ( aggregate . indicator ) { newGroupingSets = ImmutableBitSet . ORDERING . immutableSortedCopy ( ImmutableBitSet . permute ( aggregate . getGroupSets ( ) , map ) ) ; } relBuilder . push ( aggregate . copy ( aggregate . getTraitSet ( ) , relBuilder . build ( ) , aggregate . indicator , newGroupSet , newGroupingSets , aggCallList ) ) ; // If there ' s no left child yet , no need to create the join
if ( n == 0 ) { return ; } // Create the join condition . It is of the form
// ' left . f0 = right . f0 and left . f1 = right . f1 and . . . '
// where { f0 , f1 , . . . } are the GROUP BY fields .
final List < RelDataTypeField > distinctFields = relBuilder . peek ( ) . getRowType ( ) . getFieldList ( ) ; final List < RexNode > conditions = com . google . common . collect . Lists . newArrayList ( ) ; for ( i = 0 ; i < groupAndIndicatorCount ; ++ i ) { // null values form its own group
// use " is not distinct from " so that the join condition
// allows null values to match .
conditions . add ( rexBuilder . makeCall ( SqlStdOperatorTable . IS_NOT_DISTINCT_FROM , RexInputRef . of ( i , leftFields ) , new RexInputRef ( leftFields . size ( ) + i , distinctFields . get ( i ) . getType ( ) ) ) ) ; } // Join in the new ' select distinct ' relation .
relBuilder . join ( JoinRelType . INNER , conditions ) ; |
public class DataSlice { /** * This is a CUSTOM serialisation method for the DataSlice
* object .
* We only need to write the bytes that are part of the payload .
* Any bytes that are not contained by the section defined by
* the offset and length values are not stored to disk . The
* length is also written for use upon reading the data from
* disk .
* @ param out
* @ exception IOException */
private void writeObject ( ObjectOutputStream out ) throws IOException { } } | out . writeInt ( _length ) ; out . write ( _bytes , _offset , _length ) ; |
public class AResource { /** * Returns a result , depending on the query parameters .
* @ param impl
* implementation
* @ param path
* path info
* @ return parameter map */
Response createResponse ( final JaxRx impl , final ResourcePath path ) { } } | final StreamingOutput out = createOutput ( impl , path ) ; // change media type , dependent on WRAP value
final boolean wrap = path . getValue ( QueryParameter . WRAP ) == null || path . getValue ( QueryParameter . WRAP ) . equals ( "yes" ) ; String type = wrap ? MediaType . APPLICATION_XML : MediaType . TEXT_PLAIN ; // overwrite type if METHOD or MEDIA - TYPE parameters are specified
final String op = path . getValue ( QueryParameter . OUTPUT ) ; if ( op != null ) { final Scanner sc = new Scanner ( op ) ; sc . useDelimiter ( "," ) ; while ( sc . hasNext ( ) ) { final String [ ] sp = sc . next ( ) . split ( "=" , 2 ) ; if ( sp . length == 1 ) continue ; if ( sp [ 0 ] . equals ( METHOD ) ) { for ( final String [ ] m : METHODS ) if ( sp [ 1 ] . equals ( m [ 0 ] ) ) type = m [ 1 ] ; } else if ( sp [ 0 ] . equals ( MEDIATYPE ) ) { type = sp [ 1 ] ; } } } // check validity of media type
MediaType mt = null ; try { mt = MediaType . valueOf ( type ) ; } catch ( final IllegalArgumentException ex ) { throw new JaxRxException ( 400 , ex . getMessage ( ) ) ; } return Response . ok ( out , mt ) . build ( ) ; |
public class JDBCStorableGenerator { /** * Generates code which emulates this :
* / / May throw FetchException
* JDBCConnectionCapability . yieldConnection ( con ) ;
* @ param capVar required reference to JDBCConnectionCapability
* @ param conVar optional connection to yield */
private void yieldConnection ( CodeBuilder b , LocalVariable capVar , LocalVariable conVar ) { } } | if ( conVar != null ) { b . loadLocal ( capVar ) ; b . loadLocal ( conVar ) ; b . invokeInterface ( TypeDesc . forClass ( JDBCConnectionCapability . class ) , "yieldConnection" , null , new TypeDesc [ ] { TypeDesc . forClass ( Connection . class ) } ) ; } |
public class FastMath { /** * Compute the hyperbolic sine of a number .
* @ param x number on which evaluation is done
* @ return hyperbolic sine of x */
public static double sinh ( double x ) { } } | boolean negate = false ; if ( x != x ) { return x ; } // sinh [ z ] = ( exp ( z ) - exp ( - z ) / 2
// for values of z larger than about 20,
// exp ( - z ) can be ignored in comparison with exp ( z )
if ( x > 20 ) { if ( x >= LOG_MAX_VALUE ) { // Avoid overflow ( MATH - 905 ) .
final double t = exp ( 0.5 * x ) ; return ( 0.5 * t ) * t ; } else { return 0.5 * exp ( x ) ; } } else if ( x < - 20 ) { if ( x <= - LOG_MAX_VALUE ) { // Avoid overflow ( MATH - 905 ) .
final double t = exp ( - 0.5 * x ) ; return ( - 0.5 * t ) * t ; } else { return - 0.5 * exp ( - x ) ; } } if ( x == 0 ) { return x ; } if ( x < 0.0 ) { x = - x ; negate = true ; } double result ; if ( x > 0.25 ) { double hiPrec [ ] = new double [ 2 ] ; exp ( x , 0.0 , hiPrec ) ; double ya = hiPrec [ 0 ] + hiPrec [ 1 ] ; double yb = - ( ya - hiPrec [ 0 ] - hiPrec [ 1 ] ) ; double temp = ya * HEX_40000000 ; double yaa = ya + temp - temp ; double yab = ya - yaa ; // recip = 1 / y
double recip = 1.0 / ya ; temp = recip * HEX_40000000 ; double recipa = recip + temp - temp ; double recipb = recip - recipa ; // Correct for rounding in division
recipb += ( 1.0 - yaa * recipa - yaa * recipb - yab * recipa - yab * recipb ) * recip ; // Account for yb
recipb += - yb * recip * recip ; recipa = - recipa ; recipb = - recipb ; // y = y + 1 / y
temp = ya + recipa ; yb += - ( temp - ya - recipa ) ; ya = temp ; temp = ya + recipb ; yb += - ( temp - ya - recipb ) ; ya = temp ; result = ya + yb ; result *= 0.5 ; } else { double hiPrec [ ] = new double [ 2 ] ; expm1 ( x , hiPrec ) ; double ya = hiPrec [ 0 ] + hiPrec [ 1 ] ; double yb = - ( ya - hiPrec [ 0 ] - hiPrec [ 1 ] ) ; /* Compute expm1 ( - x ) = - expm1 ( x ) / ( expm1 ( x ) + 1) */
double denom = 1.0 + ya ; double denomr = 1.0 / denom ; double denomb = - ( denom - 1.0 - ya ) + yb ; double ratio = ya * denomr ; double temp = ratio * HEX_40000000 ; double ra = ratio + temp - temp ; double rb = ratio - ra ; temp = denom * HEX_40000000 ; double za = denom + temp - temp ; double zb = denom - za ; rb += ( ya - za * ra - za * rb - zb * ra - zb * rb ) * denomr ; // Adjust for yb
rb += yb * denomr ; // numerator
rb += - ya * denomb * denomr * denomr ; // denominator
// y = y - 1 / y
temp = ya + ra ; yb += - ( temp - ya - ra ) ; ya = temp ; temp = ya + rb ; yb += - ( temp - ya - rb ) ; ya = temp ; result = ya + yb ; result *= 0.5 ; } if ( negate ) { result = - result ; } return result ; |
public class BeanCopierFactory { /** * 构建方法体实现 */
private static String getMethodImplCode ( Integer sequence , Class < ? > sourceClass , Class < ? > targetClass , boolean deepCopy , final Map < String , PropConverter < ? , ? > > propCvtMap ) throws Exception { } } | StringBuilder methodCode = new StringBuilder ( ) ; methodCode . append ( "public void copyProps(" ) . append ( Object . class . getName ( ) ) . append ( " sourceObj, " ) . append ( Object . class . getName ( ) ) . append ( " targetObj){\n" ) ; methodCode . append ( sourceClass . getName ( ) ) . append ( " source = " ) . append ( "(" ) . append ( sourceClass . getName ( ) ) . append ( ")sourceObj;\n" ) ; methodCode . append ( targetClass . getName ( ) ) . append ( " target = " ) . append ( "(" ) . append ( targetClass . getName ( ) ) . append ( ")targetObj;\n" ) ; // 这里查找了包括父类的属性
Field [ ] targetFields = ReflectionUtils . findFields ( targetClass ) ; for ( Field field : targetFields ) { if ( ! Modifier . isStatic ( field . getModifiers ( ) ) ) { // 是否含有set方法
String methodNameSuffix = capitalize ( field . getName ( ) ) ; Class < ? > targetFieldClass = field . getType ( ) ; Method setMethod = ReflectionUtils . findMethod ( targetClass , "set" + methodNameSuffix , targetFieldClass ) ; if ( setMethod == null ) { setMethod = ReflectionUtils . findMethod ( targetClass , "set" + field . getName ( ) , targetFieldClass ) ; if ( setMethod != null ) { methodNameSuffix = field . getName ( ) ; } } if ( setMethod != null ) { // 查看这个属性是否有 PropConverter
if ( propCvtMap != null && propCvtMap . containsKey ( field . getName ( ) ) ) { String converterName = field . getName ( ) + "Converter" ; String converterType = PropConverter . class . getName ( ) ; methodCode . append ( converterType ) . append ( " " ) . append ( converterName ) . append ( " = (" ) . append ( converterType ) . append ( ")" ) . append ( BeanCopierFactory . class . getName ( ) ) . append ( ".getConverter(" ) . append ( sequence ) . append ( "," ) . append ( "\"" ) . append ( field . getName ( ) ) . append ( "\");\n" ) ; methodCode . append ( "target." ) . append ( setMethod . getName ( ) ) . append ( "(" ) . append ( "(" ) . append ( targetFieldClass . getName ( ) ) . append ( ")" ) . append ( converterName ) . append ( ".convert(" ) . append ( "source" ) . append ( ")" ) . append ( ");\n" ) ; continue ; } // set方法存在 , 看是否 sourceClass 有get方法或者is方法
Method getMethod = ReflectionUtils . findMethod ( sourceClass , "get" + methodNameSuffix ) ; if ( getMethod == null && ( targetFieldClass == boolean . class || targetFieldClass == Boolean . class ) ) { getMethod = ReflectionUtils . findMethod ( sourceClass , "is" + methodNameSuffix ) ; } if ( getMethod == null ) { continue ; } // 查看返回值是否相同 , 不完全相同是否可以转换
if ( getMethod . getReturnType ( ) == targetFieldClass ) { if ( ! deepCopy ) { methodCode . append ( "target." ) . append ( setMethod . getName ( ) ) . append ( "(" ) . append ( "source." ) . append ( getMethod . getName ( ) ) . append ( "()" ) . append ( ");\n" ) ; } else { if ( ClassHelper . isPrimitiveType ( targetFieldClass ) || ClassHelper . isPrimitiveWrapperType ( targetFieldClass ) || targetFieldClass == String . class ) { methodCode . append ( "target." ) . append ( setMethod . getName ( ) ) . append ( "(" ) . append ( "source." ) . append ( getMethod . getName ( ) ) . append ( "()" ) . append ( ");\n" ) ; } else { // 深度复制 , 对于非基本类型的采用流的方式拷贝
methodCode . append ( "target." ) . append ( setMethod . getName ( ) ) . append ( "(" ) . append ( "(" ) . append ( targetFieldClass . getName ( ) ) . append ( ")" ) . append ( BeanUtils . class . getName ( ) ) . append ( ".deepClone(" ) . append ( "source." ) . append ( getMethod . getName ( ) ) . append ( "()" ) . append ( ")" ) . append ( ");\n" ) ; } } } else if ( ClassHelper . isPrimitiveType ( targetFieldClass ) && ClassHelper . getPrimitiveTypeByWrapper ( getMethod . getReturnType ( ) ) == targetFieldClass ) { // 类似 target . setInt ( source . getInt ( ) = = null ? 0 : source . getInt ( ) ) ;
methodCode . append ( "target." ) . append ( setMethod . getName ( ) ) . append ( "(" ) ; methodCode . append ( "source." ) . append ( getMethod . getName ( ) ) . append ( "() == null ? " ) . append ( String . valueOf ( ClassHelper . getPrimitiveDftValue ( targetFieldClass ) ) ) . append ( " : " ) . append ( "source." ) . append ( getMethod . getName ( ) ) . append ( "()" ) ; methodCode . append ( ");\n" ) ; } else if ( ClassHelper . isPrimitiveWrapperType ( targetFieldClass ) && ClassHelper . getWrapperTypeByPrimitive ( getMethod . getReturnType ( ) ) == targetFieldClass ) { methodCode . append ( "target." ) . append ( setMethod . getName ( ) ) . append ( "(" ) . append ( "source." ) . append ( getMethod . getName ( ) ) . append ( "()" ) . append ( ");\n" ) ; } } } } methodCode . append ( "}" ) ; return methodCode . toString ( ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.