signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class CommerceNotificationQueueEntryPersistenceImpl {

    /**
     * Returns all the commerce notification queue entries where
     * commerceNotificationTemplateId = &#63;.
     *
     * @param commerceNotificationTemplateId the commerce notification template ID
     * @return the matching commerce notification queue entries
     */
    @Override
    public List<CommerceNotificationQueueEntry> findByCommerceNotificationTemplateId(long commerceNotificationTemplateId) {
        // Delegates to the ranged overload; QueryUtil.ALL_POS on both bounds
        // requests the complete (unpaged) result set, and the trailing null
        // presumably means "no ordering comparator" — TODO confirm against
        // the overload's signature.
        return findByCommerceNotificationTemplateId(commerceNotificationTemplateId, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null);
    }
}
public class AWSCognitoIdentityProviderClient { /** * Allows the developer to delete the user pool client .
* @ param deleteUserPoolClientRequest
* Represents the request to delete a user pool client .
* @ return Result of the DeleteUserPoolClient operation returned by the service .
* @ throws ResourceNotFoundException
* This exception is thrown when the Amazon Cognito service cannot find the requested resource .
* @ throws InvalidParameterException
* This exception is thrown when the Amazon Cognito service encounters an invalid parameter .
* @ throws TooManyRequestsException
* This exception is thrown when the user has made too many requests for a given operation .
* @ throws NotAuthorizedException
* This exception is thrown when a user is not authorized .
* @ throws InternalErrorException
* This exception is thrown when Amazon Cognito encounters an internal error .
* @ sample AWSCognitoIdentityProvider . DeleteUserPoolClient
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cognito - idp - 2016-04-18 / DeleteUserPoolClient "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public DeleteUserPoolClientResult deleteUserPoolClient ( DeleteUserPoolClientRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDeleteUserPoolClient ( request ) ; |
public class InterpreterUtils { /** * Deserialize the given python function . If the functions class definition cannot be found we assume that this is
* the first invocation of this method for a given job and load the python script containing the class definition
* via jython .
* @ param context the RuntimeContext of the java function
* @ param serFun serialized python UDF
* @ return deserialized python UDF
* @ throws FlinkException if the deserialization failed */
@ SuppressWarnings ( "unchecked" ) public static < X > X deserializeFunction ( RuntimeContext context , byte [ ] serFun ) throws FlinkException { } } | if ( ! jythonInitialized ) { // This branch is only tested by end - to - end tests
String path = context . getDistributedCache ( ) . getFile ( PythonConstants . FLINK_PYTHON_DC_ID ) . getAbsolutePath ( ) ; String scriptName = PythonStreamExecutionEnvironment . PythonJobParameters . getScriptName ( context . getExecutionConfig ( ) . getGlobalJobParameters ( ) ) ; try { initPythonInterpreter ( new String [ ] { Paths . get ( path , scriptName ) . toString ( ) } , path , scriptName ) ; } catch ( Exception e ) { try { LOG . error ( "Initialization of jython failed." , e ) ; throw new FlinkRuntimeException ( "Initialization of jython failed." , e ) ; } catch ( Exception ie ) { // this may occur if the initial exception relies on jython being initialized properly
LOG . error ( "Initialization of jython failed. Could not print original stacktrace." , ie ) ; throw new FlinkRuntimeException ( "Initialization of jython failed. Could not print original stacktrace." ) ; } } } try { return ( X ) SerializationUtils . deserializeObject ( serFun ) ; } catch ( IOException | ClassNotFoundException ex ) { throw new FlinkException ( "Deserialization of user-function failed." , ex ) ; } |
public class Base64 { /** * Decodes four bytes from array < var > source < / var >
* and writes the resulting bytes ( up to three of them )
* to < var > destination < / var > .
* The source and destination arrays can be manipulated
* anywhere along their length by specifying
* < var > srcOffset < / var > and < var > destOffset < / var > .
* This method does not check to make sure your arrays
* are large enough to accomodate < var > srcOffset < / var > + 4 for
* the < var > source < / var > array or < var > destOffset < / var > + 3 for
* the < var > destination < / var > array .
* This method returns the actual number of bytes that
* were converted from the Base64 encoding .
* < p > This is the lowest level of the decoding methods with
* all possible parameters . < / p >
* @ param source the array to convert
* @ param srcOffset the index where conversion begins
* @ param destination the array to hold the conversion
* @ param destOffset the index where output will be put
* @ return the number of decoded bytes converted */
private final int decode4to3 ( byte [ ] source , int srcOffset , byte [ ] destination , int destOffset ) { } } | // Example : Dk = = or Dk . .
if ( source [ srcOffset + 2 ] == PADDING_CHAR ) { int outBuff = ( ( DECODABET [ source [ srcOffset ] ] & 0xFF ) << 18 ) | ( ( DECODABET [ source [ srcOffset + 1 ] ] & 0xFF ) << 12 ) ; destination [ destOffset ] = ( byte ) ( outBuff >>> 16 ) ; return 1 ; } // Example : DkL = or DkL .
else if ( source [ srcOffset + 3 ] == PADDING_CHAR ) { int outBuff = ( ( DECODABET [ source [ srcOffset ] ] & 0xFF ) << 18 ) | ( ( DECODABET [ source [ srcOffset + 1 ] ] & 0xFF ) << 12 ) | ( ( DECODABET [ source [ srcOffset + 2 ] ] & 0xFF ) << 6 ) ; destination [ destOffset ] = ( byte ) ( outBuff >>> 16 ) ; destination [ destOffset + 1 ] = ( byte ) ( outBuff >>> 8 ) ; return 2 ; } // Example : DkLE
else { int outBuff = ( ( DECODABET [ source [ srcOffset ] ] & 0xFF ) << 18 ) | ( ( DECODABET [ source [ srcOffset + 1 ] ] & 0xFF ) << 12 ) | ( ( DECODABET [ source [ srcOffset + 2 ] ] & 0xFF ) << 6 ) | ( ( DECODABET [ source [ srcOffset + 3 ] ] & 0xFF ) ) ; destination [ destOffset ] = ( byte ) ( outBuff >> 16 ) ; destination [ destOffset + 1 ] = ( byte ) ( outBuff >> 8 ) ; destination [ destOffset + 2 ] = ( byte ) ( outBuff ) ; return 3 ; } |
public class FunctionInputDefBuilder { /** * Adds a new { @ link VarSet } with the given path name and returns a builder
* for the new < CODE > VarSet < / CODE > . */
public VarSetBuilder varSetAtPath ( String [ ] path ) { } } | VarSet varSet = null ; if ( path != null && path . length > 0 ) { varSet = new VarSet ( path [ 0 ] ) ; functionInputDef_ . addVarDef ( varSet ) ; for ( int i = 1 ; i < path . length ; i ++ ) { VarSet child = new VarSet ( path [ i ] ) ; varSet . addMember ( child ) ; varSet = child ; } } return varSet == null ? null : new VarSetBuilder ( varSet ) ; |
public class VersionInfoMojo { /** * Add properties to the project that are replaced . */
private void addProjectProperties ( ) throws MojoExecutionException { } } | Properties projectProps = mavenProject . getProperties ( ) ; projectProps . setProperty ( PROPERTY_NAME_COMPANY , versionInfo . getCompanyName ( ) ) ; projectProps . setProperty ( PROPERTY_NAME_COPYRIGHT , versionInfo . getCopyright ( ) ) ; projectProps . setProperty ( PROPERTY_NAME_VERSION_MAJOR , "0" ) ; projectProps . setProperty ( PROPERTY_NAME_VERSION_MINOR , "0" ) ; projectProps . setProperty ( PROPERTY_NAME_VERSION_INCREMENTAL , "0" ) ; projectProps . setProperty ( PROPERTY_NAME_VERSION_BUILD , "0" ) ; String version = mavenProject . getVersion ( ) ; if ( version != null && version . length ( ) > 0 ) { ArtifactVersion artifactVersion = new DefaultArtifactVersion ( version ) ; if ( version . equals ( artifactVersion . getQualifier ( ) ) ) { String msg = "Unable to parse the version string, please use standard maven version format." ; getLog ( ) . error ( msg ) ; throw new MojoExecutionException ( msg ) ; } projectProps . setProperty ( PROPERTY_NAME_VERSION_MAJOR , String . valueOf ( artifactVersion . getMajorVersion ( ) ) ) ; projectProps . setProperty ( PROPERTY_NAME_VERSION_MINOR , String . valueOf ( artifactVersion . getMinorVersion ( ) ) ) ; projectProps . setProperty ( PROPERTY_NAME_VERSION_INCREMENTAL , String . valueOf ( artifactVersion . getIncrementalVersion ( ) ) ) ; projectProps . setProperty ( PROPERTY_NAME_VERSION_BUILD , String . valueOf ( artifactVersion . getBuildNumber ( ) ) ) ; } else { getLog ( ) . warn ( "Missing version for project. Version parts will be set to 0" ) ; } |
public class CmsMessageBundleEditorOptions { /** * Handles adding a key . Calls the registered listener and wraps it ' s method in some GUI adjustments . */
void handleAddKey ( ) { } } | String key = m_addKeyInput . getValue ( ) ; if ( m_listener . handleAddKey ( key ) ) { Notification . show ( key . isEmpty ( ) ? m_messages . key ( Messages . GUI_NOTIFICATION_MESSAGEBUNDLEEDITOR_EMPTY_KEY_SUCCESSFULLY_ADDED_0 ) : m_messages . key ( Messages . GUI_NOTIFICATION_MESSAGEBUNDLEEDITOR_KEY_SUCCESSFULLY_ADDED_1 , key ) ) ; } else { CmsMessageBundleEditorTypes . showWarning ( m_messages . key ( Messages . GUI_NOTIFICATION_MESSAGEBUNDLEEDITOR_KEY_ALREADEY_EXISTS_CAPTION_0 ) , m_messages . key ( Messages . GUI_NOTIFICATION_MESSAGEBUNDLEEDITOR_KEY_ALREADEY_EXISTS_DESCRIPTION_1 , key ) ) ; } m_addKeyInput . focus ( ) ; m_addKeyInput . selectAll ( ) ; |
public class SftpClient { /** * Rename a file on the remote computer .
* @ param oldpath
* the old path
* @ param newpath
* the new path
* @ throws SftpStatusException
* @ throws SshException */
public void rename ( String oldpath , String newpath ) throws SftpStatusException , SshException { } } | String from = resolveRemotePath ( oldpath ) ; String to = resolveRemotePath ( newpath ) ; SftpFileAttributes attrs = null ; try { attrs = sftp . getAttributes ( to ) ; } catch ( SftpStatusException ex ) { sftp . renameFile ( from , to ) ; return ; } if ( attrs != null && attrs . isDirectory ( ) ) { sftp . renameFile ( from , to ) ; } else { throw new SftpStatusException ( SftpStatusException . SSH_FX_FILE_ALREADY_EXISTS , newpath + " already exists on the remote filesystem" ) ; } |
public class AddonId { /** * Create an { @ link AddonId } from the given name , version , and API version . */
public static AddonId from ( String name , String version , String apiVersion ) { } } | return AddonId . from ( name , SingleVersion . valueOf ( version ) , apiVersion == null || apiVersion . trim ( ) . isEmpty ( ) ? EmptyVersion . getInstance ( ) : SingleVersion . valueOf ( apiVersion ) ) ; |
public class HttpRequest { /** * Write the HTTP request line as it was received . */
public void writeRequestLine ( Writer writer ) throws IOException { } } | writer . write ( _method ) ; writer . write ( ' ' ) ; writer . write ( _uri != null ? _uri . toString ( ) : "null" ) ; writer . write ( ' ' ) ; writer . write ( _version ) ; |
public class CommonUtils {

    /**
     * Null-safe string comparison; null values sort last.
     *
     * @return 0 if both are equal (or both null), a positive value if
     *         {@code left} sorts after {@code right}, a negative value otherwise
     */
    public static int compare(String left, String right) {
        if (left == null && right == null) {
            return 0;
        }
        if (left == null) {
            return 1; // nulls last
        }
        if (right == null) {
            return -1; // nulls last
        }
        return left.compareTo(right);
    }
}
public class AbstractMaterialDialogBuilder { /** * Obtains , whether the dividers , which are located above and below the dialog ' s scrollable
* areas , should be shown when scrolling , or not , from a specific theme .
* @ param themeResourceId
* The resource id of the theme , the boolean value should be obtained from , as an { @ link
* Integer } value */
private void obtainShowDividersOnScroll ( @ StyleRes final int themeResourceId ) { } } | TypedArray typedArray = getContext ( ) . getTheme ( ) . obtainStyledAttributes ( themeResourceId , new int [ ] { R . attr . materialDialogShowDividersOnScroll } ) ; showDividersOnScroll ( typedArray . getBoolean ( 0 , true ) ) ; |
public class TagSpecification { /** * { @ link Specification } for retrieving { @ link DistributionSetTag } s by
* assigned { @ link DistributionSet } .
* @ param dsId
* of the distribution set
* @ return the { @ link JpaDistributionSetTag } { @ link Specification } */
public static Specification < JpaDistributionSetTag > ofDistributionSet ( final Long dsId ) { } } | return ( dsRoot , query , criteriaBuilder ) -> { final Join < JpaDistributionSetTag , JpaDistributionSet > tagJoin = dsRoot . join ( JpaDistributionSetTag_ . assignedToDistributionSet ) ; query . distinct ( true ) ; return criteriaBuilder . equal ( tagJoin . get ( JpaDistributionSet_ . id ) , dsId ) ; } ; |
public class FtpMessage { /** * Gets the reply string . */
public String getReplyString ( ) { } } | Object replyString = getHeader ( FtpMessageHeaders . FTP_REPLY_STRING ) ; if ( replyString != null ) { return replyString . toString ( ) ; } return null ; |
public class AfplibPackageImpl {

    /**
     * Returns the {@link EClass} for GBOX, lazily resolved on first access from
     * the registered AFP lib package.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getGBOX() {
        if (gboxEClass == null) {
            // Classifier index 441 is fixed by the generated package metadata.
            gboxEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(441);
        }
        return gboxEClass;
    }
}
public class NFBuildGraph {

    /**
     * Creates an {@link com.netflix.nfgraph.build.NFBuildGraphNode} for
     * <code>nodeSpec</code> and <code>ordinal</code> and adds it to
     * <code>nodes</code>. If such a node exists in <code>nodes</code>, then
     * that node is returned.
     */
    public NFBuildGraphNode getOrCreateNode(NFBuildGraphNodeList nodes, NFNodeSpec nodeSpec, int ordinal) {
        // The node cache performs the lookup-or-create against `nodes` —
        // presumably keyed by (nodeSpec, ordinal); see NFBuildGraphNodeCache.
        return nodeCache.getNode(nodes, nodeSpec, ordinal);
    }
}
public class Endpoint { /** * Parse the authority part of a URI . The authority part may have one of the following formats :
* < ul >
* < li > { @ code " group : < groupName > " } for a group endpoint < / li >
* < li > { @ code " < host > : < port > " } for a host endpoint < / li >
* < li > { @ code " < host > " } for a host endpoint with no port number specified < / li >
* < / ul >
* An IPv4 or IPv6 address can be specified in lieu of a host name , e . g . { @ code " 127.0.0.1:8080 " } and
* { @ code " [ : : 1 ] : 8080 " } . */
public static Endpoint parse ( String authority ) { } } | requireNonNull ( authority , "authority" ) ; if ( authority . startsWith ( "group:" ) ) { return ofGroup ( authority . substring ( 6 ) ) ; } final HostAndPort parsed = HostAndPort . fromString ( authority ) . withDefaultPort ( 0 ) ; return create ( parsed . getHost ( ) , parsed . getPort ( ) ) ; |
public class CqlAllRowsQueryImpl { /** * Submit all the callables to the executor by synchronize their execution so they all start
* AFTER the have all been submitted .
* @ param executor
* @ param callables
* @ return */
private List < Future < Boolean > > startTasks ( ExecutorService executor , List < Callable < Boolean > > callables ) { } } | List < Future < Boolean > > tasks = Lists . newArrayList ( ) ; for ( Callable < Boolean > callable : callables ) { tasks . add ( executor . submit ( callable ) ) ; } return tasks ; |
public class StringHelper { /** * Get the first index of cSearch within sText ignoring case .
* @ param sText
* The text to search in . May be < code > null < / code > .
* @ param cSearch
* The char to search for . May be < code > null < / code > .
* @ param aSortLocale
* The locale to be used for case unifying .
* @ return The first index of sSearch within sText or { @ value # STRING _ NOT _ FOUND }
* if sSearch was not found or if any parameter was < code > null < / code > .
* @ see String # indexOf ( int ) */
public static int getIndexOfIgnoreCase ( @ Nullable final String sText , final char cSearch , @ Nonnull final Locale aSortLocale ) { } } | return sText != null && sText . length ( ) >= 1 ? sText . toLowerCase ( aSortLocale ) . indexOf ( Character . toLowerCase ( cSearch ) ) : STRING_NOT_FOUND ; |
public class ChessboardCornerClusterFinder {

    /**
     * Converts the internal graphs into unordered chessboard grids.
     * Each unmarked seed vertex spawns one cluster; the cluster's vertices are
     * copied into a ChessboardCornerGraph and their edges reconnected using the
     * c2n / n2c index lookup tables, which are reset to -1 between clusters.
     */
    private void convertToOutput(List<ChessboardCorner> corners) {
        // Size the lookup tables and clear them to the -1 sentinel.
        // c2n / n2c appear to map between corner indices and cluster-node
        // indices — TODO confirm the exact direction against growCluster().
        c2n.resize(corners.size());
        n2c.resize(vertexes.size());
        open.reset();
        n2c.fill(-1);
        c2n.fill(-1);
        for (int seedIdx = 0; seedIdx < vertexes.size; seedIdx++) {
            Vertex seedN = vertexes.get(seedIdx);
            // Already absorbed into an earlier cluster — skip.
            if (seedN.marked)
                continue;
            ChessboardCornerGraph graph = clusters.grow();
            graph.reset();
            // traverse the graph and add all the nodes in this cluster
            growCluster(corners, seedIdx, graph);
            // Connect the nodes together in the output graph
            for (int i = 0; i < graph.corners.size; i++) {
                ChessboardCornerGraph.Node gn = graph.corners.get(i);
                Vertex n = vertexes.get(n2c.get(i));
                for (int j = 0; j < n.connections.size(); j++) {
                    int edgeCornerIdx = n.connections.get(j).dst.index;
                    int outputIdx = c2n.get(edgeCornerIdx);
                    // Every edge destination must already be part of this
                    // cluster; a -1 here indicates an internal inconsistency.
                    if (outputIdx == -1) {
                        throw new IllegalArgumentException("Edge to node not in the graph. n.idx=" + n.index + " conn.idx=" + edgeCornerIdx);
                    }
                    gn.edges[j] = graph.corners.get(outputIdx);
                }
            }
            // ensure arrays are all -1 again for sanity checks
            for (int i = 0; i < graph.corners.size; i++) {
                ChessboardCornerGraph.Node gn = graph.corners.get(i);
                int indexCorner = n2c.get(gn.index);
                c2n.data[indexCorner] = -1;
                n2c.data[gn.index] = -1;
            }
            // A one-node "grid" is degenerate — drop it from the output.
            if (graph.corners.size <= 1)
                clusters.removeTail();
        }
    }
}
public class ServerGroupDAOImpl { /** * Turns a server group DMR model into a strongly typed entity
* @ param groupName
* @ param model
* @ return */
private ServerGroupRecord model2ServerGroup ( String groupName , ModelNode model ) { } } | ServerGroupRecord record = factory . serverGroup ( ) . as ( ) ; record . setName ( groupName ) ; record . setProfileName ( model . get ( "profile" ) . asString ( ) ) ; record . setSocketBinding ( model . get ( "socket-binding-group" ) . asString ( ) ) ; Jvm jvm = ModelAdapter . model2JVM ( factory , model ) ; if ( jvm != null ) jvm . setInherited ( false ) ; // on this level they can ' t inherit from anyone
record . setJvm ( jvm ) ; List < PropertyRecord > propertyRecords = ModelAdapter . model2Property ( factory , model ) ; record . setProperties ( propertyRecords ) ; return record ; |
public class AfplibPackageImpl {

    /**
     * Returns the {@link EEnum} for PGPRGSHside, lazily resolved on first
     * access from the registered AFP lib package.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getPGPRGSHside() {
        if (pgprgsHsideEEnum == null) {
            // Classifier index 126 is fixed by the generated package metadata.
            pgprgsHsideEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(126);
        }
        return pgprgsHsideEEnum;
    }
}
public class RequestClientOptions { /** * Copy the internal states of this < code > RequestClientOptions < / code > to the
* target < code > RequestClientOptions < / code > . */
void copyTo ( RequestClientOptions target ) { } } | target . setReadLimit ( getReadLimit ( ) ) ; for ( Marker marker : Marker . values ( ) ) target . putClientMarker ( marker , getClientMarker ( marker ) ) ; |
public class ApiOvhCloud { /** * Get SSH key
* REST : GET / cloud / project / { serviceName } / sshkey / { keyId }
* @ param keyId [ required ] SSH key id
* @ param serviceName [ required ] Project name */
public OvhSshKeyDetail project_serviceName_sshkey_keyId_GET ( String serviceName , String keyId ) throws IOException { } } | String qPath = "/cloud/project/{serviceName}/sshkey/{keyId}" ; StringBuilder sb = path ( qPath , serviceName , keyId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhSshKeyDetail . class ) ; |
public class GobblinClusterManager { /** * In separate controller mode , one controller will manage manager ' s HA , the other will handle the job dispatching and
* work unit assignment . */
@ VisibleForTesting void initializeHelixManager ( ) { } } | this . multiManager = new GobblinHelixMultiManager ( this . config , new Function < Void , MessageHandlerFactory > ( ) { @ Override public MessageHandlerFactory apply ( Void aVoid ) { return GobblinClusterManager . this . getUserDefinedMessageHandlerFactory ( ) ; } } , this . eventBus , stopStatus ) ; this . multiManager . addLeadershipChangeAwareComponent ( this ) ; |
public class Choice2 {

    /**
     * {@inheritDoc}
     * @param lazyAppFn
     */
    @Override
    public <C> Lazy<? extends Monad<C, Choice2<A, ?>>> lazyZip(Lazy<? extends Applicative<Function<? super B, ? extends C>, Choice2<A, ?>>> lazyAppFn) {
        // Match on this choice: an `a` (left) value short-circuits without
        // forcing lazyAppFn; a `b` (right) value forces the lazy applicative
        // and applies the wrapped function to it — consistent with the usual
        // left-biased short-circuiting of Choice — TODO confirm against the
        // lambda-umbrella Choice2 contract.
        return match(a -> lazy(a(a)),
                     b -> lazyAppFn.fmap(choiceF -> choiceF.<C>fmap(f -> f.apply(b)).coerce()));
    }
}
public class InternalXbaseWithAnnotationsParser { /** * InternalXbaseWithAnnotations . g : 4593:1 : ruleXFeatureCall returns [ EObject current = null ] : ( ( ) ( otherlv _ 1 = ' < ' ( ( lv _ typeArguments _ 2_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 3 = ' , ' ( ( lv _ typeArguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 5 = ' > ' ) ? ( ( ruleIdOrSuper ) ) ( ( ( ( ' ( ' ) ) = > ( lv _ explicitOperationCall _ 7_0 = ' ( ' ) ) ( ( ( ( ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) ) ) ) = > ( lv _ featureCallArguments _ 8_0 = ruleXShortClosure ) ) | ( ( ( lv _ featureCallArguments _ 9_0 = ruleXExpression ) ) ( otherlv _ 10 = ' , ' ( ( lv _ featureCallArguments _ 11_0 = ruleXExpression ) ) ) * ) ) ? otherlv _ 12 = ' ) ' ) ? ( ( ( ( ) ' [ ' ) ) = > ( lv _ featureCallArguments _ 13_0 = ruleXClosure ) ) ? ) ; */
public final EObject ruleXFeatureCall ( ) throws RecognitionException { } } | EObject current = null ; Token otherlv_1 = null ; Token otherlv_3 = null ; Token otherlv_5 = null ; Token lv_explicitOperationCall_7_0 = null ; Token otherlv_10 = null ; Token otherlv_12 = null ; EObject lv_typeArguments_2_0 = null ; EObject lv_typeArguments_4_0 = null ; EObject lv_featureCallArguments_8_0 = null ; EObject lv_featureCallArguments_9_0 = null ; EObject lv_featureCallArguments_11_0 = null ; EObject lv_featureCallArguments_13_0 = null ; enterRule ( ) ; try { // InternalXbaseWithAnnotations . g : 4599:2 : ( ( ( ) ( otherlv _ 1 = ' < ' ( ( lv _ typeArguments _ 2_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 3 = ' , ' ( ( lv _ typeArguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 5 = ' > ' ) ? ( ( ruleIdOrSuper ) ) ( ( ( ( ' ( ' ) ) = > ( lv _ explicitOperationCall _ 7_0 = ' ( ' ) ) ( ( ( ( ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) ) ) ) = > ( lv _ featureCallArguments _ 8_0 = ruleXShortClosure ) ) | ( ( ( lv _ featureCallArguments _ 9_0 = ruleXExpression ) ) ( otherlv _ 10 = ' , ' ( ( lv _ featureCallArguments _ 11_0 = ruleXExpression ) ) ) * ) ) ? otherlv _ 12 = ' ) ' ) ? ( ( ( ( ) ' [ ' ) ) = > ( lv _ featureCallArguments _ 13_0 = ruleXClosure ) ) ? ) )
// InternalXbaseWithAnnotations . g : 4600:2 : ( ( ) ( otherlv _ 1 = ' < ' ( ( lv _ typeArguments _ 2_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 3 = ' , ' ( ( lv _ typeArguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 5 = ' > ' ) ? ( ( ruleIdOrSuper ) ) ( ( ( ( ' ( ' ) ) = > ( lv _ explicitOperationCall _ 7_0 = ' ( ' ) ) ( ( ( ( ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) ) ) ) = > ( lv _ featureCallArguments _ 8_0 = ruleXShortClosure ) ) | ( ( ( lv _ featureCallArguments _ 9_0 = ruleXExpression ) ) ( otherlv _ 10 = ' , ' ( ( lv _ featureCallArguments _ 11_0 = ruleXExpression ) ) ) * ) ) ? otherlv _ 12 = ' ) ' ) ? ( ( ( ( ) ' [ ' ) ) = > ( lv _ featureCallArguments _ 13_0 = ruleXClosure ) ) ? )
{ // InternalXbaseWithAnnotations . g : 4600:2 : ( ( ) ( otherlv _ 1 = ' < ' ( ( lv _ typeArguments _ 2_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 3 = ' , ' ( ( lv _ typeArguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 5 = ' > ' ) ? ( ( ruleIdOrSuper ) ) ( ( ( ( ' ( ' ) ) = > ( lv _ explicitOperationCall _ 7_0 = ' ( ' ) ) ( ( ( ( ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) ) ) ) = > ( lv _ featureCallArguments _ 8_0 = ruleXShortClosure ) ) | ( ( ( lv _ featureCallArguments _ 9_0 = ruleXExpression ) ) ( otherlv _ 10 = ' , ' ( ( lv _ featureCallArguments _ 11_0 = ruleXExpression ) ) ) * ) ) ? otherlv _ 12 = ' ) ' ) ? ( ( ( ( ) ' [ ' ) ) = > ( lv _ featureCallArguments _ 13_0 = ruleXClosure ) ) ? )
// InternalXbaseWithAnnotations . g : 4601:3 : ( ) ( otherlv _ 1 = ' < ' ( ( lv _ typeArguments _ 2_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 3 = ' , ' ( ( lv _ typeArguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 5 = ' > ' ) ? ( ( ruleIdOrSuper ) ) ( ( ( ( ' ( ' ) ) = > ( lv _ explicitOperationCall _ 7_0 = ' ( ' ) ) ( ( ( ( ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) ) ) ) = > ( lv _ featureCallArguments _ 8_0 = ruleXShortClosure ) ) | ( ( ( lv _ featureCallArguments _ 9_0 = ruleXExpression ) ) ( otherlv _ 10 = ' , ' ( ( lv _ featureCallArguments _ 11_0 = ruleXExpression ) ) ) * ) ) ? otherlv _ 12 = ' ) ' ) ? ( ( ( ( ) ' [ ' ) ) = > ( lv _ featureCallArguments _ 13_0 = ruleXClosure ) ) ?
{ // InternalXbaseWithAnnotations . g : 4601:3 : ( )
// InternalXbaseWithAnnotations . g : 4602:4:
{ if ( state . backtracking == 0 ) { current = forceCreateModelElement ( grammarAccess . getXFeatureCallAccess ( ) . getXFeatureCallAction_0 ( ) , current ) ; } } // InternalXbaseWithAnnotations . g : 4608:3 : ( otherlv _ 1 = ' < ' ( ( lv _ typeArguments _ 2_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 3 = ' , ' ( ( lv _ typeArguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 5 = ' > ' ) ?
int alt80 = 2 ; int LA80_0 = input . LA ( 1 ) ; if ( ( LA80_0 == 26 ) ) { alt80 = 1 ; } switch ( alt80 ) { case 1 : // InternalXbaseWithAnnotations . g : 4609:4 : otherlv _ 1 = ' < ' ( ( lv _ typeArguments _ 2_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 3 = ' , ' ( ( lv _ typeArguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 5 = ' > '
{ otherlv_1 = ( Token ) match ( input , 26 , FOLLOW_34 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_1 , grammarAccess . getXFeatureCallAccess ( ) . getLessThanSignKeyword_1_0 ( ) ) ; } // InternalXbaseWithAnnotations . g : 4613:4 : ( ( lv _ typeArguments _ 2_0 = ruleJvmArgumentTypeReference ) )
// InternalXbaseWithAnnotations . g : 4614:5 : ( lv _ typeArguments _ 2_0 = ruleJvmArgumentTypeReference )
{ // InternalXbaseWithAnnotations . g : 4614:5 : ( lv _ typeArguments _ 2_0 = ruleJvmArgumentTypeReference )
// InternalXbaseWithAnnotations . g : 4615:6 : lv _ typeArguments _ 2_0 = ruleJvmArgumentTypeReference
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXFeatureCallAccess ( ) . getTypeArgumentsJvmArgumentTypeReferenceParserRuleCall_1_1_0 ( ) ) ; } pushFollow ( FOLLOW_35 ) ; lv_typeArguments_2_0 = ruleJvmArgumentTypeReference ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXFeatureCallRule ( ) ) ; } add ( current , "typeArguments" , lv_typeArguments_2_0 , "org.eclipse.xtext.xbase.Xtype.JvmArgumentTypeReference" ) ; afterParserOrEnumRuleCall ( ) ; } } } // InternalXbaseWithAnnotations . g : 4632:4 : ( otherlv _ 3 = ' , ' ( ( lv _ typeArguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) *
loop79 : do { int alt79 = 2 ; int LA79_0 = input . LA ( 1 ) ; if ( ( LA79_0 == 15 ) ) { alt79 = 1 ; } switch ( alt79 ) { case 1 : // InternalXbaseWithAnnotations . g : 4633:5 : otherlv _ 3 = ' , ' ( ( lv _ typeArguments _ 4_0 = ruleJvmArgumentTypeReference ) )
{ otherlv_3 = ( Token ) match ( input , 15 , FOLLOW_34 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_3 , grammarAccess . getXFeatureCallAccess ( ) . getCommaKeyword_1_2_0 ( ) ) ; } // InternalXbaseWithAnnotations . g : 4637:5 : ( ( lv _ typeArguments _ 4_0 = ruleJvmArgumentTypeReference ) )
// InternalXbaseWithAnnotations . g : 4638:6 : ( lv _ typeArguments _ 4_0 = ruleJvmArgumentTypeReference )
{ // InternalXbaseWithAnnotations . g : 4638:6 : ( lv _ typeArguments _ 4_0 = ruleJvmArgumentTypeReference )
// InternalXbaseWithAnnotations . g : 4639:7 : lv _ typeArguments _ 4_0 = ruleJvmArgumentTypeReference
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXFeatureCallAccess ( ) . getTypeArgumentsJvmArgumentTypeReferenceParserRuleCall_1_2_1_0 ( ) ) ; } pushFollow ( FOLLOW_35 ) ; lv_typeArguments_4_0 = ruleJvmArgumentTypeReference ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXFeatureCallRule ( ) ) ; } add ( current , "typeArguments" , lv_typeArguments_4_0 , "org.eclipse.xtext.xbase.Xtype.JvmArgumentTypeReference" ) ; afterParserOrEnumRuleCall ( ) ; } } } } break ; default : break loop79 ; } } while ( true ) ; otherlv_5 = ( Token ) match ( input , 27 , FOLLOW_33 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_5 , grammarAccess . getXFeatureCallAccess ( ) . getGreaterThanSignKeyword_1_3 ( ) ) ; } } break ; } // InternalXbaseWithAnnotations . g : 4662:3 : ( ( ruleIdOrSuper ) )
// InternalXbaseWithAnnotations . g : 4663:4 : ( ruleIdOrSuper )
{ // InternalXbaseWithAnnotations . g : 4663:4 : ( ruleIdOrSuper )
// InternalXbaseWithAnnotations . g : 4664:5 : ruleIdOrSuper
{ if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElement ( grammarAccess . getXFeatureCallRule ( ) ) ; } } if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXFeatureCallAccess ( ) . getFeatureJvmIdentifiableElementCrossReference_2_0 ( ) ) ; } pushFollow ( FOLLOW_65 ) ; ruleIdOrSuper ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { afterParserOrEnumRuleCall ( ) ; } } } // InternalXbaseWithAnnotations . g : 4678:3 : ( ( ( ( ' ( ' ) ) = > ( lv _ explicitOperationCall _ 7_0 = ' ( ' ) ) ( ( ( ( ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) ) ) ) = > ( lv _ featureCallArguments _ 8_0 = ruleXShortClosure ) ) | ( ( ( lv _ featureCallArguments _ 9_0 = ruleXExpression ) ) ( otherlv _ 10 = ' , ' ( ( lv _ featureCallArguments _ 11_0 = ruleXExpression ) ) ) * ) ) ? otherlv _ 12 = ' ) ' ) ?
int alt83 = 2 ; alt83 = dfa83 . predict ( input ) ; switch ( alt83 ) { case 1 : // InternalXbaseWithAnnotations . g : 4679:4 : ( ( ( ' ( ' ) ) = > ( lv _ explicitOperationCall _ 7_0 = ' ( ' ) ) ( ( ( ( ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) ) ) ) = > ( lv _ featureCallArguments _ 8_0 = ruleXShortClosure ) ) | ( ( ( lv _ featureCallArguments _ 9_0 = ruleXExpression ) ) ( otherlv _ 10 = ' , ' ( ( lv _ featureCallArguments _ 11_0 = ruleXExpression ) ) ) * ) ) ? otherlv _ 12 = ' ) '
{ // InternalXbaseWithAnnotations . g : 4679:4 : ( ( ( ' ( ' ) ) = > ( lv _ explicitOperationCall _ 7_0 = ' ( ' ) )
// InternalXbaseWithAnnotations . g : 4680:5 : ( ( ' ( ' ) ) = > ( lv _ explicitOperationCall _ 7_0 = ' ( ' )
{ // InternalXbaseWithAnnotations . g : 4684:5 : ( lv _ explicitOperationCall _ 7_0 = ' ( ' )
// InternalXbaseWithAnnotations . g : 4685:6 : lv _ explicitOperationCall _ 7_0 = ' ( '
{ lv_explicitOperationCall_7_0 = ( Token ) match ( input , 14 , FOLLOW_37 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( lv_explicitOperationCall_7_0 , grammarAccess . getXFeatureCallAccess ( ) . getExplicitOperationCallLeftParenthesisKeyword_3_0_0 ( ) ) ; } if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElement ( grammarAccess . getXFeatureCallRule ( ) ) ; } setWithLastConsumed ( current , "explicitOperationCall" , true , "(" ) ; } } } // InternalXbaseWithAnnotations . g : 4697:4 : ( ( ( ( ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) ) ) ) = > ( lv _ featureCallArguments _ 8_0 = ruleXShortClosure ) ) | ( ( ( lv _ featureCallArguments _ 9_0 = ruleXExpression ) ) ( otherlv _ 10 = ' , ' ( ( lv _ featureCallArguments _ 11_0 = ruleXExpression ) ) ) * ) ) ?
int alt82 = 3 ; alt82 = dfa82 . predict ( input ) ; switch ( alt82 ) { case 1 : // InternalXbaseWithAnnotations . g : 4698:5 : ( ( ( ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) ) ) ) = > ( lv _ featureCallArguments _ 8_0 = ruleXShortClosure ) )
{ // InternalXbaseWithAnnotations . g : 4698:5 : ( ( ( ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) ) ) ) = > ( lv _ featureCallArguments _ 8_0 = ruleXShortClosure ) )
// InternalXbaseWithAnnotations . g : 4699:6 : ( ( ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) ) ) ) = > ( lv _ featureCallArguments _ 8_0 = ruleXShortClosure )
{ // InternalXbaseWithAnnotations . g : 4724:6 : ( lv _ featureCallArguments _ 8_0 = ruleXShortClosure )
// InternalXbaseWithAnnotations . g : 4725:7 : lv _ featureCallArguments _ 8_0 = ruleXShortClosure
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXFeatureCallAccess ( ) . getFeatureCallArgumentsXShortClosureParserRuleCall_3_1_0_0 ( ) ) ; } pushFollow ( FOLLOW_7 ) ; lv_featureCallArguments_8_0 = ruleXShortClosure ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXFeatureCallRule ( ) ) ; } add ( current , "featureCallArguments" , lv_featureCallArguments_8_0 , "org.eclipse.xtext.xbase.Xbase.XShortClosure" ) ; afterParserOrEnumRuleCall ( ) ; } } } } break ; case 2 : // InternalXbaseWithAnnotations . g : 4743:5 : ( ( ( lv _ featureCallArguments _ 9_0 = ruleXExpression ) ) ( otherlv _ 10 = ' , ' ( ( lv _ featureCallArguments _ 11_0 = ruleXExpression ) ) ) * )
{ // InternalXbaseWithAnnotations . g : 4743:5 : ( ( ( lv _ featureCallArguments _ 9_0 = ruleXExpression ) ) ( otherlv _ 10 = ' , ' ( ( lv _ featureCallArguments _ 11_0 = ruleXExpression ) ) ) * )
// InternalXbaseWithAnnotations . g : 4744:6 : ( ( lv _ featureCallArguments _ 9_0 = ruleXExpression ) ) ( otherlv _ 10 = ' , ' ( ( lv _ featureCallArguments _ 11_0 = ruleXExpression ) ) ) *
{ // InternalXbaseWithAnnotations . g : 4744:6 : ( ( lv _ featureCallArguments _ 9_0 = ruleXExpression ) )
// InternalXbaseWithAnnotations . g : 4745:7 : ( lv _ featureCallArguments _ 9_0 = ruleXExpression )
{ // InternalXbaseWithAnnotations . g : 4745:7 : ( lv _ featureCallArguments _ 9_0 = ruleXExpression )
// InternalXbaseWithAnnotations . g : 4746:8 : lv _ featureCallArguments _ 9_0 = ruleXExpression
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXFeatureCallAccess ( ) . getFeatureCallArgumentsXExpressionParserRuleCall_3_1_1_0_0 ( ) ) ; } pushFollow ( FOLLOW_6 ) ; lv_featureCallArguments_9_0 = ruleXExpression ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXFeatureCallRule ( ) ) ; } add ( current , "featureCallArguments" , lv_featureCallArguments_9_0 , "org.eclipse.xtext.xbase.Xbase.XExpression" ) ; afterParserOrEnumRuleCall ( ) ; } } } // InternalXbaseWithAnnotations . g : 4763:6 : ( otherlv _ 10 = ' , ' ( ( lv _ featureCallArguments _ 11_0 = ruleXExpression ) ) ) *
loop81 : do { int alt81 = 2 ; int LA81_0 = input . LA ( 1 ) ; if ( ( LA81_0 == 15 ) ) { alt81 = 1 ; } switch ( alt81 ) { case 1 : // InternalXbaseWithAnnotations . g : 4764:7 : otherlv _ 10 = ' , ' ( ( lv _ featureCallArguments _ 11_0 = ruleXExpression ) )
{ otherlv_10 = ( Token ) match ( input , 15 , FOLLOW_9 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_10 , grammarAccess . getXFeatureCallAccess ( ) . getCommaKeyword_3_1_1_1_0 ( ) ) ; } // InternalXbaseWithAnnotations . g : 4768:7 : ( ( lv _ featureCallArguments _ 11_0 = ruleXExpression ) )
// InternalXbaseWithAnnotations . g : 4769:8 : ( lv _ featureCallArguments _ 11_0 = ruleXExpression )
{ // InternalXbaseWithAnnotations . g : 4769:8 : ( lv _ featureCallArguments _ 11_0 = ruleXExpression )
// InternalXbaseWithAnnotations . g : 4770:9 : lv _ featureCallArguments _ 11_0 = ruleXExpression
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXFeatureCallAccess ( ) . getFeatureCallArgumentsXExpressionParserRuleCall_3_1_1_1_1_0 ( ) ) ; } pushFollow ( FOLLOW_6 ) ; lv_featureCallArguments_11_0 = ruleXExpression ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXFeatureCallRule ( ) ) ; } add ( current , "featureCallArguments" , lv_featureCallArguments_11_0 , "org.eclipse.xtext.xbase.Xbase.XExpression" ) ; afterParserOrEnumRuleCall ( ) ; } } } } break ; default : break loop81 ; } } while ( true ) ; } } break ; } otherlv_12 = ( Token ) match ( input , 16 , FOLLOW_66 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_12 , grammarAccess . getXFeatureCallAccess ( ) . getRightParenthesisKeyword_3_2 ( ) ) ; } } break ; } // InternalXbaseWithAnnotations . g : 4795:3 : ( ( ( ( ) ' [ ' ) ) = > ( lv _ featureCallArguments _ 13_0 = ruleXClosure ) ) ?
int alt84 = 2 ; alt84 = dfa84 . predict ( input ) ; switch ( alt84 ) { case 1 : // InternalXbaseWithAnnotations . g : 4796:4 : ( ( ( ) ' [ ' ) ) = > ( lv _ featureCallArguments _ 13_0 = ruleXClosure )
{ // InternalXbaseWithAnnotations . g : 4802:4 : ( lv _ featureCallArguments _ 13_0 = ruleXClosure )
// InternalXbaseWithAnnotations . g : 4803:5 : lv _ featureCallArguments _ 13_0 = ruleXClosure
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXFeatureCallAccess ( ) . getFeatureCallArgumentsXClosureParserRuleCall_4_0 ( ) ) ; } pushFollow ( FOLLOW_2 ) ; lv_featureCallArguments_13_0 = ruleXClosure ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXFeatureCallRule ( ) ) ; } add ( current , "featureCallArguments" , lv_featureCallArguments_13_0 , "org.eclipse.xtext.xbase.Xbase.XClosure" ) ; afterParserOrEnumRuleCall ( ) ; } } } break ; } } } if ( state . backtracking == 0 ) { leaveRule ( ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ; |
public class SimpleNotificationDelegate { /** * Called when a notification is received
* @ param context the service context
* @ param notification the notification intent */
@ Override public void onReceive ( Context context , Intent notification ) { } } | GoogleCloudMessaging gcm = GoogleCloudMessaging . getInstance ( context ) ; String messageType = gcm . getMessageType ( notification ) ; Bundle extras = notification . getExtras ( ) ; if ( ! extras . isEmpty ( ) && GoogleCloudMessaging . MESSAGE_TYPE_MESSAGE . equals ( messageType ) ) { showNotification ( context , extras ) ; } |
public class MultiStringDistance { /** * Lazily prepare a string . Ie , if it ' s already a
* MultiStringWrapper , do nothing , otherwise use prepare ( ) to
* convert to a MultiStringWrapper . */
protected MultiStringWrapper asMultiStringWrapper ( StringWrapper w ) { } } | if ( w instanceof MultiStringWrapper ) return ( MultiStringWrapper ) w ; else return ( MultiStringWrapper ) prepare ( w . unwrap ( ) ) ; |
public class Requests { /** * Gets the Internet Protocol ( IP ) address of the end - client that sent the specified request .
* It will try to get HTTP head " X - forwarded - for " or " X - Real - IP " from the last proxy to get the request first , if not found , try to get
* it directly by { @ link HttpServletRequest # getRemoteAddr ( ) } .
* @ param request the specified request
* @ return the IP address of the end - client sent the specified request */
public static String getRemoteAddr ( final HttpServletRequest request ) { } } | String ret = request . getHeader ( "X-forwarded-for" ) ; if ( StringUtils . isBlank ( ret ) ) { ret = request . getHeader ( "X-Real-IP" ) ; } if ( StringUtils . isBlank ( ret ) ) { return request . getRemoteAddr ( ) ; } return ret . split ( "," ) [ 0 ] ; |
/**
 * Tries to allocate a block from a quick list. If the quick list for the
 * computed size class is empty, space is allocated from the file tail instead.
 *
 * @param request_size the requested allocation size
 * @return the address of the usable payload (block address plus header)
 * @throws IOException on I/O failure while marking the block on disk
 */
private long allocate_from_ql(int request_size) throws IOException {
    Block block1;
    int block_size;
    int list_index;
    small_requests++;
    list_index = calculate_list_index_for_alloc(request_size);
    if (ql_heads[list_index].length > 0) { // quick list is nonempty
        // Pop the first block off the quick list and update all counters.
        block1 = ql_heads[list_index].first_block;
        ql_heads[list_index].length--;
        if (ql_heads[list_index].length == 0) {
            nonempty_lists--;
        }
        ql_heads[list_index].first_block = block1.next;
        allocated_blocks++;
        free_blocks--;
        ql_hits++;
        allocated_words += block1.size;
        free_words -= block1.size;
        // do the following as late as possible
        // NOTE(review): the negative size apparently marks the block as
        // allocated on disk — confirm against the on-disk block format.
        seek_and_count(block1.address);
        writeInt(-block1.size);
        // Skip past the header so the caller gets the payload address.
        return (block1.address + HDR_SIZE);
    } else { // allocate from tail
        block_size = calculate_block_size(list_index);
        return allocate_from_tail(block_size);
    }
}
public class ParameterInjectionPointImpl { /** * Creates an injection point without firing the { @ link ProcessInjectionPoint } event . */
public static < T , X > ParameterInjectionPointImpl < T , X > silent ( ParameterInjectionPointAttributes < T , X > attributes ) { } } | return new ParameterInjectionPointImpl < T , X > ( attributes ) ; |
public class CompiledFEELSemanticMappings { /** * FEEL spec Table 39 */
public static Boolean ne ( Object left , Object right ) { } } | return not ( EvalHelper . isEqual ( left , right , null ) ) ; |
public class PhotosInterface { /** * Get the context for the specified photo .
* This method does not require authentication .
* @ param photoId
* The photo ID
* @ return The PhotoContext
* @ throws FlickrException */
public PhotoContext getContext ( String photoId ) throws FlickrException { } } | Map < String , Object > parameters = new HashMap < String , Object > ( ) ; parameters . put ( "method" , METHOD_GET_CONTEXT ) ; parameters . put ( "photo_id" , photoId ) ; Response response = transport . get ( transport . getPath ( ) , parameters , apiKey , sharedSecret ) ; if ( response . isError ( ) ) { throw new FlickrException ( response . getErrorCode ( ) , response . getErrorMessage ( ) ) ; } PhotoContext photoContext = new PhotoContext ( ) ; Collection < Element > payload = response . getPayloadCollection ( ) ; for ( Element payloadElement : payload ) { String tagName = payloadElement . getTagName ( ) ; if ( tagName . equals ( "prevphoto" ) ) { Photo photo = new Photo ( ) ; photo . setId ( payloadElement . getAttribute ( "id" ) ) ; photo . setSecret ( payloadElement . getAttribute ( "secret" ) ) ; photo . setTitle ( payloadElement . getAttribute ( "title" ) ) ; photo . setFarm ( payloadElement . getAttribute ( "farm" ) ) ; photo . setUrl ( payloadElement . getAttribute ( "url" ) ) ; photoContext . setPreviousPhoto ( photo ) ; } else if ( tagName . equals ( "nextphoto" ) ) { Photo photo = new Photo ( ) ; photo . setId ( payloadElement . getAttribute ( "id" ) ) ; photo . setSecret ( payloadElement . getAttribute ( "secret" ) ) ; photo . setTitle ( payloadElement . getAttribute ( "title" ) ) ; photo . setFarm ( payloadElement . getAttribute ( "farm" ) ) ; photo . setUrl ( payloadElement . getAttribute ( "url" ) ) ; photoContext . setNextPhoto ( photo ) ; } } return photoContext ; |
public class IdentityHashMap { /** * Returns the appropriate capacity for the given expected maximum size .
* Returns the smallest power of two between MINIMUM _ CAPACITY and
* MAXIMUM _ CAPACITY , inclusive , that is greater than ( 3 *
* expectedMaxSize ) / 2 , if such a number exists . Otherwise returns
* MAXIMUM _ CAPACITY . */
private static int capacity ( int expectedMaxSize ) { } } | // assert expectedMaxSize > = 0;
return ( expectedMaxSize > MAXIMUM_CAPACITY / 3 ) ? MAXIMUM_CAPACITY : ( expectedMaxSize <= 2 * MINIMUM_CAPACITY / 3 ) ? MINIMUM_CAPACITY : Integer . highestOneBit ( expectedMaxSize + ( expectedMaxSize << 1 ) ) ; |
/**
 * Deletes a file or a directory on COS.
 *
 * @param request the delete request, either a DelFileRequest or a DelFolderRequest
 * @return a JSON string of the form {"code":$code, "message":"$mess"}; code 0 means
 *         success, any other value is a failure, and message is "success" or the
 *         failure reason
 * @throws AbstractCosException SDK-defined COS exception, usually caused by bad input
 *         parameters or environment problems (e.g. no network connectivity)
 */
protected String delBase(final AbstractDelRequest request) throws AbstractCosException {
    // Validate the request before doing any network work.
    request.check_param();
    String url = buildUrl(request);
    // One-shot signature scoped to this bucket/path.
    String sign = Sign.getOneEffectiveSign(request.getBucketName(), request.getCosPath(), this.cred);
    HttpRequest httpRequest = new HttpRequest();
    httpRequest.setUrl(url);
    httpRequest.addHeader(RequestHeaderKey.Authorization, sign);
    httpRequest.addHeader(RequestHeaderKey.Content_TYPE, RequestHeaderValue.ContentType.JSON);
    httpRequest.addHeader(RequestHeaderKey.USER_AGENT, this.config.getUserAgent());
    // The COS API expresses "delete" as an op parameter on a JSON POST.
    httpRequest.addParam(RequestBodyKey.OP, RequestBodyValue.OP.DELETE);
    httpRequest.setMethod(HttpMethod.POST);
    httpRequest.setContentType(HttpContentType.APPLICATION_JSON);
    return httpClient.sendHttpRequest(httpRequest);
}
/**
 * Marshalls the given {@link KeyPair} into the protocol representation,
 * binding each field to its protocol marshalling location.
 *
 * @param keyPair the object to marshall; must not be null
 * @param protocolMarshaller the marshaller to write the fields to
 * @throws SdkClientException if {@code keyPair} is null or any field fails to marshall
 */
public void marshall(KeyPair keyPair, ProtocolMarshaller protocolMarshaller) {
    if (keyPair == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(keyPair.getName(), NAME_BINDING);
        protocolMarshaller.marshall(keyPair.getArn(), ARN_BINDING);
        protocolMarshaller.marshall(keyPair.getSupportCode(), SUPPORTCODE_BINDING);
        protocolMarshaller.marshall(keyPair.getCreatedAt(), CREATEDAT_BINDING);
        protocolMarshaller.marshall(keyPair.getLocation(), LOCATION_BINDING);
        protocolMarshaller.marshall(keyPair.getResourceType(), RESOURCETYPE_BINDING);
        protocolMarshaller.marshall(keyPair.getTags(), TAGS_BINDING);
        protocolMarshaller.marshall(keyPair.getFingerprint(), FINGERPRINT_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure in the SDK's client exception, keeping the cause.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
/**
 * Gets the tableRows value for this PriceFeedItem.
 *
 * @return tableRows — the rows in this price extension. Minimum number of items
 *         allowed is 3 and the maximum is 8.
 */
public com.google.api.ads.adwords.axis.v201809.cm.PriceTableRow[] getTableRows() {
    // NOTE(review): returns the internal array directly — callers can mutate it.
    return tableRows;
}
/**
 * Creates a stream using a HEADERS frame. Only the client side of a
 * connection may initiate streams this way.
 *
 * @param requestHeaders the request headers to send on the new stream
 * @return the sink channel for the newly created stream
 * @throws IOException if the channel is closed, this is not the client side,
 *         or the concurrent-stream limit has been reached
 */
public synchronized Http2HeadersStreamSinkChannel createStream(HeaderMap requestHeaders) throws IOException {
    if (!isClient()) {
        throw UndertowMessages.MESSAGES.headersStreamCanOnlyBeCreatedByClient();
    }
    if (!isOpen()) {
        throw UndertowMessages.MESSAGES.channelIsClosed();
    }
    // Reserve a slot first, then check the limit (0 or negative max = unlimited).
    // NOTE(review): the counter is presumably decremented when a stream closes —
    // not visible here; confirm elsewhere in the class.
    sendConcurrentStreamsAtomicUpdater.incrementAndGet(this);
    if (sendMaxConcurrentStreams > 0 && sendConcurrentStreams > sendMaxConcurrentStreams) {
        throw UndertowMessages.MESSAGES.streamLimitExceeded();
    }
    // Stream ids advance by 2, keeping the client-initiated parity.
    int streamId = streamIdCounter;
    streamIdCounter += 2;
    Http2HeadersStreamSinkChannel http2SynStreamStreamSinkChannel = new Http2HeadersStreamSinkChannel(this, streamId, requestHeaders);
    currentStreams.put(streamId, new StreamHolder(http2SynStreamStreamSinkChannel));
    return http2SynStreamStreamSinkChannel;
}
public class SNAXParser { /** * Return a new SNAXParser using the specified model .
* @ param factory XMLInputFactory to use when creating input streams
* @ param model NodeModel that defines the state machine to use when parsing
* @ return a new parser */
public static < T > SNAXParser < T > createParser ( XMLInputFactory factory , NodeModel < T > model ) { } } | return new SNAXParser < T > ( factory , model ) ; |
/**
 * Derives XML schema attributes from a COBOL usage clause. This gives a rough
 * approximation of the XSD type because the picture clause usually carries info
 * that needs to be further analyzed to determine a more precise type. If no
 * usage clause, we assume there will be a picture clause.
 *
 * @param usage COBOL usage clause
 */
private void setAttributesFromUsage(final Usage usage) {
    // Each case sets the internal COBOL type and the mapped XSD type;
    // fixed-size usages also set a minimum storage length in bytes.
    switch (usage) {
    case BINARY:
        _cobolType = CobolTypes.BINARY_ITEM;
        _xsdType = XsdType.INTEGER;
        break;
    case NATIVEBINARY:
        _cobolType = CobolTypes.NATIVE_BINARY_ITEM;
        _xsdType = XsdType.INTEGER;
        break;
    case SINGLEFLOAT:
        _cobolType = CobolTypes.SINGLE_FLOAT_ITEM;
        _xsdType = XsdType.FLOAT;
        _minStorageLength = 4;
        break;
    case DOUBLEFLOAT:
        _cobolType = CobolTypes.DOUBLE_FLOAT_ITEM;
        _xsdType = XsdType.DOUBLE;
        _minStorageLength = 8;
        break;
    case PACKEDDECIMAL:
        _cobolType = CobolTypes.PACKED_DECIMAL_ITEM;
        _xsdType = XsdType.DECIMAL;
        break;
    case INDEX:
        _cobolType = CobolTypes.INDEX_ITEM;
        _xsdType = XsdType.HEXBINARY;
        _minStorageLength = 4;
        break;
    case POINTER:
        _cobolType = CobolTypes.POINTER_ITEM;
        _xsdType = XsdType.HEXBINARY;
        _minStorageLength = 4;
        break;
    case PROCEDUREPOINTER:
        _cobolType = CobolTypes.PROC_POINTER_ITEM;
        _xsdType = XsdType.HEXBINARY;
        _minStorageLength = 8;
        break;
    case FUNCTIONPOINTER:
        _cobolType = CobolTypes.FUNC_POINTER_ITEM;
        _xsdType = XsdType.HEXBINARY;
        _minStorageLength = 4;
        break;
    case DISPLAY:
        _cobolType = CobolTypes.ALPHANUMERIC_ITEM;
        _xsdType = XsdType.STRING;
        break;
    case DISPLAY1:
        _cobolType = CobolTypes.DBCS_ITEM;
        _xsdType = XsdType.STRING;
        break;
    case NATIONAL:
        _cobolType = CobolTypes.NATIONAL_ITEM;
        _xsdType = XsdType.STRING;
        break;
    default:
        // Unknown usage: log and leave the attributes unchanged.
        _log.error("Unrecognized usage clause " + toString());
    }
}
/**
 * Lazily loads and caches the javadoc {@code Properties} for {@code type} from a
 * classpath resource named "&lt;type&gt;.javadoc". A missing resource yields an
 * empty (non-null) Properties object.
 */
/* package */ Properties getJavadoc() {
    // Fast path: already loaded.
    if (javadoc != null)
        return javadoc;
    synchronized (this) {
        if (javadoc != null)
            return javadoc;
        // load
        // NOTE(review): this is double-checked locking; the unsynchronized read
        // above is only safe if the 'javadoc' field is declared volatile —
        // confirm the field declaration.
        Properties p = new Properties();
        InputStream is = type.getClassLoader().getResourceAsStream(
                type.getName().replace('$', '/').replace('.', '/') + ".javadoc");
        if (is != null) {
            try {
                try {
                    p.load(is);
                } finally {
                    is.close();
                }
            } catch (IOException e) {
                throw new RuntimeException("Unable to load javadoc for " + type, e);
            }
        }
        javadoc = p;
        return javadoc;
    }
}
public class Vector3d { /** * Adds this vector to v1 and places the result in this vector .
* @ param v1
* right - hand vector */
public void add ( Vector3d v1 ) { } } | x += v1 . x ; y += v1 . y ; z += v1 . z ; |
/**
 * Returns a copy of the values included in the (view of the) array.
 * /!\ Changing the returned DataBuffer does NOT change the ndarray.
 *
 * @return a buffer containing the included values
 */
public DataBuffer getIncludedValues() {
    List<Double> val = new ArrayList<>();
    // Walk every stored (sparse) value and keep those whose underlying index
    // falls inside this view's offsets/bounds.
    for (int i = 0; i < values.length(); i++) {
        boolean isIn = true;
        int idxNotFixed = 0; // counts the non-fixed dimensions seen so far
        int[] idx = getUnderlyingIndicesOf(i).asInt();
        for (int dim = 0; dim < idx.length; dim++) {
            if (flags()[dim] == 1) {
                // Fixed dimension: the index must match the view's offset exactly.
                if (sparseOffsets()[dim] != idx[dim]) {
                    isIn = false;
                    break;
                }
            } else {
                // Free dimension: the index must lie within [offset, offset + extent).
                int lowerBound = sparseOffsets()[dim];
                long upperBound = sparseOffsets()[dim] + shape()[idxNotFixed];
                if (!(idx[dim] >= lowerBound && idx[dim] < upperBound)) {
                    isIn = false;
                    break;
                }
                idxNotFixed++;
            }
        }
        if (isIn) {
            val.add(values.getDouble(i));
        }
    }
    return Nd4j.createBuffer(Doubles.toArray(val));
}
public class EncodedProxyTask { /** * Get the next ( String ) param .
* Typically this is overidden in the concrete implementation .
* @ param strName The param name ( in most implementations this is optional ) .
* @ return The next param as a string . */
public String getNextStringParam ( String strName ) { } } | String string = this . getProperty ( strName ) ; if ( NULL . equals ( string ) ) string = null ; return string ; |
/**
 * Parses the "partitionRanges" object of the given JSON payload into an
 * immutable collection of {@link PartitionRanges}.
 *
 * @param jsData JSON payload; keys of "partitionRanges" are partition ids,
 *               each mapping range-start keys to range-end values
 * @return the parsed ranges, or null if jsData is null or parsing fails
 */
private Collection<PartitionRanges> parsePartitionRanges(JSONObject jsData) {
    if (jsData != null) {
        try {
            JSONObject partitionObj = jsData.getJSONObject("partitionRanges");
            Iterator<String> partitionKey = partitionObj.keys();
            ImmutableList.Builder<PartitionRanges> partitionRangesBuilder = ImmutableList.builder();
            while (partitionKey.hasNext()) {
                String pidStr = partitionKey.next();
                JSONObject rangeObj = partitionObj.getJSONObject(pidStr);
                Iterator<String> rangeKey = rangeObj.keys();
                // Ranges are kept sorted by their start key.
                ImmutableSortedMap.Builder<Integer, Integer> rangeBuilder = ImmutableSortedMap.naturalOrder();
                while (rangeKey.hasNext()) {
                    String rangeStartStr = rangeKey.next();
                    int rangeStart = Integer.parseInt(rangeStartStr);
                    int rangeEnd = rangeObj.getInt(rangeStartStr);
                    rangeBuilder.put(rangeStart, rangeEnd);
                }
                partitionRangesBuilder.add(new PartitionRanges(Integer.parseInt(pidStr), rangeBuilder.build()));
            }
            return partitionRangesBuilder.build();
        } catch (JSONException e) {
            // Malformed payload: warn and fall through to the null return.
            SNAP_LOG.warn("Failed to parse partition ranges", e);
        }
    }
    return null;
}
public class KickflipApiClient { /** * Verify HTTP response was successful
* and pass to handleKickflipResponse .
* If we have an HttpResponse at all , it means
* the status code was < 300 , so as far as http inspection
* goes , this method simply enforces status code of 200
* @ param response
* @ param responseClass
* @ param cb Must not be null
* @ throws IOException */
private void handleHttpResponse ( HttpResponse response , Class < ? extends Response > responseClass , KickflipCallback cb ) throws IOException { } } | // Object parsedResponse = response . parseAs ( responseClass ) ;
if ( isSuccessResponse ( response ) ) { // Http Success
handleKickflipResponse ( response , responseClass , cb ) ; // cb . onSuccess ( responseClass . cast ( parsedResponse ) ) ;
} else { // Http Failure
if ( VERBOSE ) Log . i ( TAG , String . format ( "RESPONSE (F): %s body: %s" , shortenUrlString ( response . getRequest ( ) . getUrl ( ) . toString ( ) ) , response . getContent ( ) . toString ( ) ) ) ; postExceptionToCallback ( cb , UNKNOWN_ERROR_CODE ) ; } |
public class GosuStringUtil { /** * Checks if the String contains any character in the given set of characters .
* A < code > null < / code > String will return < code > false < / code > . A < code > null < / code > search string will return
* < code > false < / code > .
* < pre >
* GosuStringUtil . containsAny ( null , * ) = false
* GosuStringUtil . containsAny ( " " , * ) = false
* GosuStringUtil . containsAny ( * , null ) = false
* GosuStringUtil . containsAny ( * , " " ) = false
* GosuStringUtil . containsAny ( " zzabyycdxx " , " za " ) = true
* GosuStringUtil . containsAny ( " zzabyycdxx " , " by " ) = true
* GosuStringUtil . containsAny ( " aba " , " z " ) = false
* < / pre >
* @ param str
* the String to check , may be null
* @ param searchChars
* the chars to search for , may be null
* @ return the < code > true < / code > if any of the chars are found , < code > false < / code > if no match or null input
* @ since 2.4 */
public static boolean containsAny ( String str , String searchChars ) { } } | if ( searchChars == null ) { return false ; } return containsAny ( str , searchChars . toCharArray ( ) ) ; |
/**
 * This operation extracts a rich set of visual features based on the image content.
 *
 * @param image an image stream
 * @param analyzeImageInStreamOptionalParameter the object representing the optional
 *        parameters to be set before calling this API
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws ComputerVisionErrorException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the ImageAnalysis object if successful
 */
public ImageAnalysis analyzeImageInStream(byte[] image, AnalyzeImageInStreamOptionalParameter analyzeImageInStreamOptionalParameter) {
    // Blocking facade over the async variant: wait for the single emitted
    // service response and unwrap its body.
    return analyzeImageInStreamWithServiceResponseAsync(image, analyzeImageInStreamOptionalParameter).toBlocking().single().body();
}
public class Functions { /** * Use an existing instance of a type that implements Unit to create a KleisliM arrow for that type
* < pre >
* { @ code
* Seq < Integer > myList = Seq . of ( 1,2,3 ) ;
* Fn1 < ? super String , ? extends Seq < String > > arrow = Functions . arrowUnit ( myList ) ;
* Seq < String > list = arrow . applyHKT ( " hello world " ) ;
* < / pre >
* @ param w
* @ param < T >
* @ param < W >
* @ return */
public static final < T , W extends Unit < T > > Function1 < ? super T , ? extends W > arrowUnit ( Unit < ? > w ) { } } | return t -> ( W ) w . unit ( t ) ; |
/**
 * Returns the minimum time at which some URL in some {@link VisitState} of the
 * visit-state queue of this workbench entry can be accessed. It is computed by
 * maximizing {@link #nextFetch} with {@link VisitState#nextFetch} of the top
 * element of the visit-state queue.
 *
 * <p>Note that this method accesses the top element blindly, which would cause a
 * {@link NullPointerException} if the visit-state queue were empty; only entries
 * with a nonempty queue may be placed on the workbench.
 *
 * @return the minimum time at which some URL in some {@link VisitState} of this
 *         entry can be accessed
 */
public synchronized long nextFetch() {
    assert visitStates.peek() != null;
    // Both the entry-level and the top visit-state's delays must have elapsed.
    return Math.max(nextFetch, visitStates.peek().nextFetch);
}
public class PackedBits8 { /** * Increases the size of the data array so that it can store an addition number of bits
* @ param amountBits Number of bits beyond ' size ' that you wish the array to be able to store
* @ param saveValue if true it will save the value of the array . If false it will not copy it */
public void growArray ( int amountBits , boolean saveValue ) { } } | size = size + amountBits ; int N = size / 8 + ( size % 8 == 0 ? 0 : 1 ) ; if ( N > data . length ) { // add in some buffer to avoid lots of calls to new
int extra = Math . min ( 1024 , N + 10 ) ; byte [ ] tmp = new byte [ N + extra ] ; if ( saveValue ) System . arraycopy ( data , 0 , tmp , 0 , data . length ) ; this . data = tmp ; } |
/**
 * Converts {@link org.springframework.util.concurrent.ListenableFuture} to
 * {@link java.util.concurrent.CompletableFuture} by adapting it through the
 * library's common value-source abstraction.
 */
public static <T> CompletableFuture<T> toCompletableFuture(ListenableFuture<T> listenableFuture) {
    return Java8FutureUtils.createCompletableFuture(SpringFutureUtils.createValueSourceFuture(listenableFuture));
}
/**
 * Heapifies the given Memory, which must be a valid HllSketch image and may have data.
 *
 * @param srcMem the given Memory, which is read-only
 * @return an HllSketch on the java heap
 */
public static final HllSketch heapify(final Memory srcMem) {
    // The preamble identifies which of the three internal modes the image is in.
    final CurMode curMode = checkPreamble(srcMem);
    final HllSketch heapSketch;
    if (curMode == CurMode.HLL) {
        // HLL mode: pick the array implementation matching the target HLL type.
        final TgtHllType tgtHllType = extractTgtHllType(srcMem);
        if (tgtHllType == TgtHllType.HLL_4) {
            heapSketch = new HllSketch(Hll4Array.heapify(srcMem));
        } else if (tgtHllType == TgtHllType.HLL_6) {
            heapSketch = new HllSketch(Hll6Array.heapify(srcMem));
        } else { // Hll_8
            heapSketch = new HllSketch(Hll8Array.heapify(srcMem));
        }
    } else if (curMode == CurMode.LIST) {
        heapSketch = new HllSketch(CouponList.heapifyList(srcMem));
    } else {
        // Remaining mode: coupon hash set.
        heapSketch = new HllSketch(CouponHashSet.heapifySet(srcMem));
    }
    return heapSketch;
}
/**
 * The document versions.
 *
 * @return the document versions; never null — an empty internal list is
 *         lazily created on first access
 */
public java.util.List<DocumentVersionInfo> getDocumentVersions() {
    if (documentVersions == null) {
        documentVersions = new com.amazonaws.internal.SdkInternalList<DocumentVersionInfo>();
    }
    return documentVersions;
}
/**
 * Checks whether a socket connection is secure. This factory creates plain
 * socket connections which are not considered secure.
 *
 * @param sock the connected socket
 * @return <code>false</code> always
 * @throws IllegalArgumentException if the argument is null or closed
 */
@Override
public final boolean isSecure(Socket sock) throws IllegalArgumentException {
    if (sock == null) {
        throw new IllegalArgumentException("Socket may not be null.");
    }
    // This check is performed last since it calls a method implemented
    // by the argument object. getClass() is final in java.lang.Object.
    if (sock.isClosed()) {
        throw new IllegalArgumentException("Socket is closed.");
    }
    // Plain sockets are never secure.
    return false;
}
public class IStringBuffer { /** * get the char at a specified position in the buffer */
public char charAt ( int idx ) { } } | if ( idx < 0 ) throw new IndexOutOfBoundsException ( "idx{" + idx + "} < 0" ) ; if ( idx >= count ) throw new IndexOutOfBoundsException ( "idx{" + idx + "} >= buffer.length" ) ; return buff [ idx ] ; |
/**
 * Starts an asynchronous upload of the current block.
 *
 * @throws IOException problems opening the destination for upload or
 *         initializing the upload
 * @throws IllegalStateException if there is no active block
 */
private synchronized void uploadCurrentBlock() throws IOException {
    if (!hasActiveBlock()) {
        throw new IllegalStateException("No active block");
    }
    LOG.debug("Writing block # {}", blockCount);
    // Lazily start the multipart upload on the first block.
    if (multiPartUpload == null) {
        LOG.debug("Initiating Multipart upload");
        multiPartUpload = new MultiPartUpload();
    }
    try {
        multiPartUpload.uploadBlockAsync(getActiveBlock());
    } finally {
        // set the block to null, so the next write will create a new block.
        clearActiveBlock();
    }
}
/**
 * Extracts routing information from {@code source}, which is expected to hold a
 * properly encoded routing label (at least 5 bytes long, same as {@code dest}).
 * Writes the label into <b>dest</b> with <i>DPC</i> and <i>OPC</i> swapped,
 * producing the return-route header.
 *
 * @param source buffer to read the incoming routing label from
 * @param dest   buffer to write the reversed routing label into
 */
public static void copyBackRouteHeader(byte[] source, byte[] dest) {
    // Read each routing-label field; offset 1 skips the leading octet.
    int thisPointCode = getFromSif_DPC(source, 1);
    int remotePointCode = getFromSif_OPC(source, 1);
    int sls = getFromSif_SLS(source, 1);
    int si = getFromSif_SI(source);
    int ssi = getFromSif_SSI(source);
    // Write back with DPC/OPC exchanged so the message routes to the sender.
    writeRoutingLabel(dest, si, ssi, sls, remotePointCode, thisPointCode);
}
public class FreePRequest { /** * < code > optional . alluxio . grpc . file . FreePOptions options = 2 ; < / code > */
public alluxio . grpc . FreePOptionsOrBuilder getOptionsOrBuilder ( ) { } } | return options_ == null ? alluxio . grpc . FreePOptions . getDefaultInstance ( ) : options_ ; |
/**
 * <!-- begin-user-doc -->
 * Returns the cost values list backed by the EMF feature store (resolved on access).
 * <!-- end-user-doc -->
 * @generated
 */
@SuppressWarnings("unchecked")
@Override
public EList<IfcCostValue> getCostValues() {
    // Generated EMF accessor: delegate to the reflective feature getter.
    return (EList<IfcCostValue>) eGet(Ifc4Package.Literals.IFC_COST_ITEM__COST_VALUES, true);
}
/**
 * Extracts a value from the line and converts it into its java equivalent.
 * On any failure the error details are recorded in {@code parseError} and
 * {@code null} is returned.
 *
 * @param line       the full input line (for error reporting)
 * @param lineNumber 1-based line number
 * @param columnInfo metadata and converter for the column being parsed
 * @param columnStr  the raw column text
 * @param linePos    position of the column within the line
 * @param target     the object being populated (unused here, kept for the converter contract)
 * @param parseError receives error details when parsing fails
 * @return the converted value, or null on error
 */
private Object extractValue(String line, int lineNumber, ColumnInfo<Object> columnInfo, String columnStr, int linePos, Object target, ParseError parseError) {
    Converter<Object, ?> converter = columnInfo.getConverter();
    // Trim when requested globally, per-column, or by the converter itself.
    if (alwaysTrimInput || columnInfo.isTrimInput() || converter.isAlwaysTrimInput()) {
        columnStr = columnStr.trim();
    }
    // Substitute the column default for blank input, if one is configured.
    if (columnStr.isEmpty() && columnInfo.getDefaultValue() != null) {
        columnStr = columnInfo.getDefaultValue();
    }
    // Still blank and blank is not allowed: record the error and bail out.
    if (columnStr.isEmpty() && columnInfo.isMustNotBeBlank()) {
        parseError.setErrorType(ErrorType.MUST_NOT_BE_BLANK);
        parseError.setMessage("field '" + columnInfo.getFieldName() + "' must not be blank");
        assignParseErrorFields(parseError, columnInfo, columnStr);
        parseError.setLinePos(linePos);
        return null;
    }
    try {
        return converter.stringToJava(line, lineNumber, linePos, columnInfo, columnStr, parseError);
    } catch (ParseException e) {
        // Expected conversion failure: format problem in the input.
        parseError.setErrorType(ErrorType.INVALID_FORMAT);
        parseError.setMessage("field '" + columnInfo.getFieldName() + "' parse-error: " + e.getMessage());
        parseError.setLinePos(linePos);
        return null;
    } catch (Exception e) {
        // Unexpected converter failure: classified as internal.
        parseError.setErrorType(ErrorType.INTERNAL_ERROR);
        parseError.setMessage("field '" + columnInfo.getFieldName() + "' error: " + e.getMessage());
        parseError.setLinePos(linePos);
        return null;
    }
}
public class Partition {
	/**
	 * Create a range partition on an {@link AbstractLabel}.
	 * The partition is first created on the database, then recorded in the
	 * topology (vertex or edge variant) and finally marked as uncommitted so the
	 * surrounding transaction decides its fate.
	 *
	 * @param sqlgGraph the graph
	 * @param abstractLabel the label to partition; must not belong to the internal sqlg schema
	 * @param name the partition name
	 * @param from the lower range bound (semantics defined by the underlying DB — confirm inclusivity)
	 * @param to the upper range bound
	 * @return the newly created, not-yet-committed {@link Partition}
	 */
	static Partition createRangePartition(SqlgGraph sqlgGraph, AbstractLabel abstractLabel, String name, String from, String to) {
		// The internal sqlg schema must never be partitioned.
		Preconditions.checkArgument(!abstractLabel.getSchema().isSqlgSchema(),
				"createRangePartition may not be called for \"%s\"", Topology.SQLG_SCHEMA);
		Partition partition = new Partition(sqlgGraph, abstractLabel, name, from, to, PartitionType.NONE, null);
		partition.createRangePartitionOnDb();
		// record in the topology; vertex and edge labels use different manager calls
		if (abstractLabel instanceof VertexLabel) {
			TopologyManager.addVertexLabelPartition(sqlgGraph, abstractLabel.getSchema().getName(),
					abstractLabel.getName(), name, from, to, PartitionType.NONE, null);
		} else {
			TopologyManager.addEdgeLabelPartition(sqlgGraph, abstractLabel, name, from, to, PartitionType.NONE, null);
		}
		// not yet committed; flipped elsewhere on commit
		partition.committed = false;
		return partition;
	}
}
public class J2EETransactionImpl {
	/**
	 * FOR internal use. Called after the external (JTA) transaction has
	 * completed; mirrors its outcome onto the internal ODMG transaction:
	 * commit on STATUS_COMMITTED, abort on any other status.
	 * Guarded so the work is performed at most once.
	 *
	 * @param status the JTA completion status (see {@code javax.transaction.Status})
	 * @see javax.transaction.Synchronization
	 */
	public void afterCompletion(int status) {
		// re-entry guard: only the first call does any work
		if (afterCompletionCall) return;
		log.info("Method afterCompletion was called");
		try {
			switch (status) {
				case Status.STATUS_COMMITTED:
					if (log.isDebugEnabled()) {
						log.debug("Method afterCompletion: Do commit internal odmg-tx, status of JTA-tx is " + TxUtil.getStatusString(status));
					}
					commit();
					break;
				default:
					// anything other than a clean commit rolls the internal tx back
					log.error("Method afterCompletion: Do abort call on internal odmg-tx, status of JTA-tx is " + TxUtil.getStatusString(status));
					abort();
			}
		} finally {
			// mark as handled even if commit()/abort() threw
			afterCompletionCall = true;
			log.info("Method afterCompletion finished");
		}
	}
}
public class DistCp { /** * Fully delete dir */
static void fullyDelete ( String dir , Configuration conf ) throws IOException { } } | if ( dir != null ) { Path tmp = new Path ( dir ) ; tmp . getFileSystem ( conf ) . delete ( tmp , true ) ; } |
public class BifurcatedConsumerSessionImpl {
	/**
	 * Returns the connection that owns this session, after verifying the
	 * session is still open.
	 *
	 * @return the owning {@code SICoreConnection}
	 * @throws SISessionUnavailableException raised by {@code checkNotClosed()} when the session is closed
	 * @throws SISessionDroppedException if the session has been dropped
	 * @see com.ibm.wsspi.sib.core.DestinationSession#getConnection()
	 */
	public SICoreConnection getConnection() throws SISessionUnavailableException, SISessionDroppedException {
		if (CoreSPIBifurcatedConsumerSession.tc.isEntryEnabled())
			SibTr.entry(CoreSPIBifurcatedConsumerSession.tc, "getConnection", this);
		// Check that this connection isn't closed.
		checkNotClosed();
		if (CoreSPIBifurcatedConsumerSession.tc.isEntryEnabled())
			SibTr.exit(CoreSPIBifurcatedConsumerSession.tc, "getConnection", _connection);
		return _connection;
	}
}
public class ApiOvhOverTheBox {
	/**
	 * List all available offers one can migrate to.
	 * REST: GET /overTheBox/{serviceName}/migration/offers
	 *
	 * @param serviceName [required] The internal name of your overTheBox offer
	 * @return the available migration offers
	 * @throws IOException on transport failure
	 * API beta
	 */
	public ArrayList<OvhAvailableMigrationOffer> serviceName_migration_offers_GET(String serviceName) throws IOException {
		String qPath = "/overTheBox/{serviceName}/migration/offers";
		// substitute {serviceName} into the path template
		StringBuilder sb = path(qPath, serviceName);
		String resp = exec(qPath, "GET", sb.toString(), null);
		// t5: presumably the pre-built type token for ArrayList<OvhAvailableMigrationOffer> — defined elsewhere
		return convertTo(resp, t5);
	}
}
public class DefaultGroovyMethods {
	/**
	 * Attempts to create an Iterator for the given object by first
	 * converting it to a Collection.
	 *
	 * @param a an array
	 * @return an Iterator for the given Array.
	 * @see org.codehaus.groovy.runtime.typehandling.DefaultTypeTransformation#asCollection(java.lang.Object[])
	 * @since 1.6.4
	 */
	public static <T> Iterator<T> iterator(T[] a) {
		return DefaultTypeTransformation.asCollection(a).iterator();
	}
}
public class BatchingContextImpl {
	/**
	 * Flags the batching context so that, upon the next call to executeBatch,
	 * the transaction being used is rolled back. Only valid after an
	 * addIndoubtXid/executeBatch pair has been called, i.e. after a prepare.
	 * A wrong state is not thrown here but deferred via {@code _deferredException}.
	 *
	 * @param xid the transaction id to roll back
	 */
	public void updateXIDToRolledback(PersistentTranId xid) {
		if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
			SibTr.entry(tc, "updateXIDToRolledback", "XID=" + xid);
		if (_deferredException == null) {
			// We are rolling back a transaction. This should only be
			// applied to prepared transactions as single-phase rollbacks
			// should not get as far as the persistence layer.
			if (_state == STATE_PREPARED) {
				_state = STATE_ROLLINGBACK;
			} else {
				// wrong state: remember the failure for the batch execution to surface
				_deferredException = new PersistenceException("Cannot ROLLBACK batch as it not in the correct state! State=" + _stateToString[_state]);
			}
		} else {
			// a previous failure poisons the batch; do nothing further
			if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
				SibTr.debug(tc, "No work attempted as an exception has already been thrown during this batch!");
		}
		if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
			SibTr.exit(tc, "updateXIDToRolledback");
	}
}
public class AbstractKeyedObjectPool {
	/**
	 * {@inheritDoc}
	 * Resolves the pool key and blocks on the resulting future until a pooled
	 * object is available.
	 */
	@Override
	public IPooledObject<V> borrow(K key) throws Exception {
		// NOTE(review): Future.get() with no timeout — callers may block indefinitely; confirm intended.
		return createFuture(PoolKey.lookup(key)).get();
	}
}
public class CmsModuleImportExportRepository {
	/**
	 * Deletes the module corresponding to the given virtual module file name.<p>
	 *
	 * @param fileName the file name
	 * @return true if the module could be deleted
	 * @throws CmsException if something goes wrong
	 */
	public synchronized boolean deleteModule(String fileName) throws CmsException {
		String moduleName = null;
		// 'ok' only feeds the module log entry written in the finally block
		boolean ok = true;
		try {
			CmsModule module = getModuleForFileName(fileName);
			if (module == null) {
				LOG.error("Deletion request for invalid module file name: " + fileName);
				ok = false;
				return false;
			}
			I_CmsReport report = createReport();
			moduleName = module.getName();
			OpenCms.getModuleManager().deleteModule(m_adminCms, module.getName(), false, report);
			ok = !(report.hasWarning() || report.hasError());
			// NOTE(review): returns true even when the report carried warnings/errors
			// (only the module log reflects 'ok') — confirm this is intentional.
			return true;
		} catch (Exception e) {
			ok = false;
			if (e instanceof CmsException) {
				throw (CmsException) e;
			}
			if (e instanceof RuntimeException) {
				throw (RuntimeException) e;
			}
			// NOTE(review): any other checked exception is swallowed and reported
			// as success here — looks suspicious; verify against callers.
			return true;
		} finally {
			m_moduleLog.log(moduleName, Action.deleteModule, ok);
		}
	}
}
public class AwtImage { /** * Returns an array of every point in the image , useful if you want to be able to
* iterate over all the coordinates .
* If you want the actual pixel values of every point then use pixels ( ) . */
public Point [ ] points ( ) { } } | Point [ ] points = new Point [ width * height ] ; int k = 0 ; for ( int y = 0 ; y < height ; y ++ ) { for ( int x = 0 ; x < width ; x ++ ) { points [ k ++ ] = new Point ( x , y ) ; } } return points ; |
public class EntityAnnotationRule { /** * checks for a valid entity definition
* @ param clazz
* @ return */
private boolean checkValidClass ( Class < ? > clazz ) { } } | return clazz . isAnnotationPresent ( Entity . class ) || clazz . isAnnotationPresent ( MappedSuperclass . class ) || clazz . isAnnotationPresent ( Embeddable . class ) ; |
public class Animation { /** * Restart the animation from the beginning */
public void restart ( ) { } } | if ( frames . size ( ) == 0 ) { return ; } stopped = false ; currentFrame = 0 ; nextChange = ( int ) ( ( ( Frame ) frames . get ( 0 ) ) . duration / speed ) ; firstUpdate = true ; lastUpdate = 0 ; |
public class AbstractInstallPlanJob {
	/**
	 * Install provided extension.
	 *
	 * @param extensionId the identifier of the extension to install
	 * @param dependency indicate if the extension is installed as a dependency
	 * @param namespace the namespace where to install the extension, or null for all namespaces
	 * @param parentBranch the children of the parent {@link DefaultExtensionPlanNode}
	 * @throws InstallException error when trying to install provided extension
	 */
	protected void installExtension(ExtensionId extensionId, boolean dependency, String namespace, DefaultExtensionPlanTree parentBranch) throws InstallException {
		if (getRequest().isVerbose()) {
			if (namespace != null) {
				this.logger.info(LOG_RESOLVE_NAMESPACE, "Resolving extension [{}] on namespace [{}]", extensionId, namespace);
			} else {
				this.logger.info(LOG_RESOLVE, "Resolving extension [{}] on all namespaces", extensionId);
			}
		}
		// Check if the feature is already in the install plan
		if (checkExistingPlanNodeExtension(extensionId, true, namespace)) {
			return;
		}
		// Check if the feature is a core extension
		checkCoreExtension(extensionId.getId());
		// resolve the extension into a plan node and attach it to the tree
		ModifableExtensionPlanNode node = installExtension(extensionId, dependency, namespace);
		addExtensionNode(node);
		parentBranch.add(node);
	}
}
public class VoiceClient {
	/**
	 * Stream audio to an ongoing call.
	 *
	 * @param uuid The UUID of the call, obtained from the object returned by {@link #createCall(Call)}. This value
	 *        can be obtained with {@link CallEvent#getUuid()}
	 * @param streamUrl A URL of an audio file in MP3 or 16-bit WAV format, to be streamed to the call.
	 * @param loop The number of times to repeat the audio. The default value is {@code 1}, or you can use
	 *        {@code 0} to indicate that the audio should be repeated indefinitely.
	 * @return The data returned from the Voice API
	 * @throws IOException if a network error occurred contacting the Nexmo Voice API.
	 * @throws NexmoClientException if there was a problem with the Nexmo request or response objects.
	 */
	public StreamResponse startStream(String uuid, String streamUrl, int loop) throws IOException, NexmoClientException {
		// delegate to the streams endpoint (PUT)
		return streams.put(new StreamRequest(uuid, streamUrl, loop));
	}
}
public class PackageManagerHelper {
	/**
	 * Wait for bundles to become active.
	 * Polls the configured bundle status URL every few seconds until all bundles
	 * are running and none of the blacklisted bundles is still deployed, or until
	 * the configured wait limit is exhausted. No-op when no bundle status URL is
	 * configured.
	 *
	 * @param httpClient Http client
	 */
	public void waitForBundlesActivation(CloseableHttpClient httpClient) {
		if (StringUtils.isBlank(props.getBundleStatusUrl())) {
			log.debug("Skipping check for bundle activation state because no bundleStatusURL is defined.");
			return;
		}
		final int WAIT_INTERVAL_SEC = 3;
		// number of polls that fit into the configured overall wait limit
		final long CHECK_RETRY_COUNT = props.getBundleStatusWaitLimitSec() / WAIT_INTERVAL_SEC;
		log.info("Check bundle activation status...");
		for (int i = 1; i <= CHECK_RETRY_COUNT; i++) {
			BundleStatusCall call = new BundleStatusCall(httpClient, props.getBundleStatusUrl(), log);
			BundleStatus bundleStatus = executeHttpCallWithRetry(call, 0);
			boolean instanceReady = true;
			// check if bundles are still stopping/starting
			if (!bundleStatus.isAllBundlesRunning()) {
				log.info("Bundles starting/stopping: " + bundleStatus.getStatusLineCompact() + " - wait " + WAIT_INTERVAL_SEC + " sec " + "(max. " + props.getBundleStatusWaitLimitSec() + " sec) ...");
				sleep(WAIT_INTERVAL_SEC);
				instanceReady = false;
			}
			// check if any of the blacklisted bundles is still present
			if (instanceReady) {
				for (Pattern blacklistBundleNamePattern : props.getBundleStatusBlacklistBundleNames()) {
					String bundleSymbolicName = bundleStatus.getMatchingBundle(blacklistBundleNamePattern);
					if (bundleSymbolicName != null) {
						log.info("Bundle '" + bundleSymbolicName + "' is still deployed " + " - wait " + WAIT_INTERVAL_SEC + " sec " + "(max. " + props.getBundleStatusWaitLimitSec() + " sec) ...");
						sleep(WAIT_INTERVAL_SEC);
						instanceReady = false;
						break;
					}
				}
			}
			// instance is ready
			if (instanceReady) {
				break;
			}
		}
	}
}
public class WCOutputStream31 {
	/**
	 * Writes a boolean to the stream. Uses the non-blocking path when a
	 * WriteListener is registered (Servlet 3.1 async I/O) and the call does not
	 * originate from the listener's onError handling; otherwise delegates to the
	 * blocking superclass implementation.
	 *
	 * @param b the value to print
	 * @throws IOException if the write fails
	 * @see javax.servlet.ServletOutputStream#print(boolean)
	 */
	public void print(boolean b) throws IOException {
		if (this._listener != null && !checkIfCalledFromWLonError()) {
			if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
				Tr.debug(tc, "non blocking print boolean , WriteListener enabled: " + this._listener);
			this.print_NonBlocking(Boolean.toString(b));
		} else {
			if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
				Tr.debug(tc, "print boolean");
			super.print(b);
		}
	}
}
public class AbstractGraph { /** * { @ inheritDoc } */
public Set < T > getAdjacencyList ( int vertex ) { } } | EdgeSet < T > e = getEdgeSet ( vertex ) ; return ( e == null ) ? Collections . < T > emptySet ( ) : new AdjacencyListView ( e ) ; |
public class AbstractHtmlState { /** * This method will return the map that represents the passed in < code > type < / code > . The boolean flag
* < / code > createIfNull < / code > indicates that the map should be created or not if it ' s null . This
* class defines two maps defined by < code > @ see # ATTR _ STYLE < / code > and < code > ATTR _ JAVASCRIPT < / code >
* @ param type < code > integer < / code > type indentifying the map to be created .
* @ param createIfNull < code > boolean < / code > flag indicating if the map should be created if it doesn ' t exist .
* @ return The map or null
* @ see # ATTR _ JAVASCRIPT */
public Map selectMap ( int type , boolean createIfNull ) { } } | if ( type == ATTR_JAVASCRIPT ) { if ( _jsMap == null && createIfNull ) _jsMap = new HashMap ( ) ; return _jsMap ; } return super . selectMap ( type , createIfNull ) ; |
public class BoxApiBookmark { /** * Gets a request that restores a trashed bookmark
* @ param id id of bookmark to restore
* @ return request to restore a bookmark from the trash */
public BoxRequestsBookmark . RestoreTrashedBookmark getRestoreTrashedBookmarkRequest ( String id ) { } } | BoxRequestsBookmark . RestoreTrashedBookmark request = new BoxRequestsBookmark . RestoreTrashedBookmark ( id , getBookmarkInfoUrl ( id ) , mSession ) ; return request ; |
public class AbstractAnsibleMojo {
	/**
	 * Adds the configured ansible command-line options to the list of command
	 * strings. Each {@code createOption} call renders a (possibly empty)
	 * flag/value pair from the corresponding mojo field.
	 *
	 * @param command the existing command to append to
	 */
	protected void addOptions(final List<String> command) {
		command.addAll(createOption("-c", connection));
		command.addAll(createOption("-f", forks));
		command.addAll(createOption("-i", inventory));
		command.addAll(createOption("-l", limit));
		command.addAll(createOption("-M", modulePath));
		command.addAll(createOption("--private-key", privateKey));
		command.addAll(createOption("-T", timeout));
		command.addAll(createOption("-u", remoteUser));
		command.addAll(createOption("--vault-password-file", vaultPasswordFile));
	}
}
public class ZoneOffset {
	/**
	 * Validates the offset fields: all three components must share the sign of
	 * {@code hours}, minutes and seconds must be within +/-59, and the overall
	 * offset must lie in -18:00 .. +18:00 (the extremes must be exact).
	 *
	 * @param hours the time-zone offset in hours, from -18 to +18
	 * @param minutes the time-zone offset in minutes, from 0 to &plusmn;59
	 * @param seconds the time-zone offset in seconds, from 0 to &plusmn;59
	 * @throws DateTimeException if the offset is not in the required range
	 */
	private static void validate(int hours, int minutes, int seconds) {
		if (hours < -18 || hours > 18) {
			throw new DateTimeException("Zone offset hours not in valid range: value " + hours + " is not in the range -18 to 18");
		}
		// sign consistency: minutes/seconds must agree with the sign of hours
		if (hours > 0) {
			if (minutes < 0 || seconds < 0) {
				throw new DateTimeException("Zone offset minutes and seconds must be positive because hours is positive");
			}
		} else if (hours < 0) {
			if (minutes > 0 || seconds > 0) {
				throw new DateTimeException("Zone offset minutes and seconds must be negative because hours is negative");
			}
		} else if ((minutes > 0 && seconds < 0) || (minutes < 0 && seconds > 0)) {
			throw new DateTimeException("Zone offset minutes and seconds must have the same sign");
		}
		if (Math.abs(minutes) > 59) {
			throw new DateTimeException("Zone offset minutes not in valid range: abs(value) " + Math.abs(minutes) + " is not in the range 0 to 59");
		}
		if (Math.abs(seconds) > 59) {
			throw new DateTimeException("Zone offset seconds not in valid range: abs(value) " + Math.abs(seconds) + " is not in the range 0 to 59");
		}
		// the extreme +/-18:00 must have no minute/second component
		if (Math.abs(hours) == 18 && (Math.abs(minutes) > 0 || Math.abs(seconds) > 0)) {
			throw new DateTimeException("Zone offset not in valid range: -18:00 to +18:00");
		}
	}
}
public class HTTPConduit {
	/**
	 * This function sets up a URL based on ENDPOINT_ADDRESS, PATH_INFO,
	 * and QUERY_STRING properties in the Message. The QUERY_STRING gets
	 * added with a "?" after the PATH_INFO. If the ENDPOINT_ADDRESS is not
	 * set on the Message, the endpoint address is taken from the
	 * "defaultEndpointURL".
	 * The PATH_INFO is only added to the endpoint address string should
	 * the PATH_INFO not equal the end of the endpoint address string.
	 *
	 * @param message The message holds the addressing information.
	 * @return The full URL specifying the HTTP request to the endpoint.
	 * @throws URISyntaxException
	 */
	private Address setupAddress(Message message) throws URISyntaxException {
		String result = (String) message.get(Message.ENDPOINT_ADDRESS);
		String pathInfo = (String) message.get(Message.PATH_INFO);
		String queryString = (String) message.get(Message.QUERY_STRING);
		// make sure defaultAddress has been initialised before it is consulted
		setAndGetDefaultAddress();
		if (result == null) {
			if (pathInfo == null && queryString == null) {
				// nothing to append: use the default address verbatim
				if (defaultAddress != null) {
					message.put(Message.ENDPOINT_ADDRESS, defaultAddress.getString());
				}
				return defaultAddress;
			}
			if (defaultAddress != null) {
				result = defaultAddress.getString();
				message.put(Message.ENDPOINT_ADDRESS, result);
			}
			// NOTE(review): if defaultAddress is still null here, 'result' remains null
			// and the endsWith() below throws NPE — confirm upstream guarantees an address.
		}
		// REVISIT : is this really correct ?
		if (null != pathInfo && !result.endsWith(pathInfo)) {
			result = result + pathInfo;
		}
		if (queryString != null) {
			result = result + "?" + queryString;
		}
		if (defaultAddress == null) {
			return setAndGetDefaultAddress(result);
		} else {
			// reuse the cached Address when the computed URL matches it
			return result.equals(defaultAddress.getString()) ? defaultAddress : new Address(result);
		}
	}
}
public class APIDescriptor { /** * Add one or more required request parameters . If one of these request
* parameters are not present , invocation will not be possible .
* @ param aParamNames
* The names of the required HTTP parameters . May be < code > null < / code >
* or empty .
* @ return this for chaining
* @ see # addRequiredParam ( String ) */
@ Nonnull public final APIDescriptor addRequiredParams ( @ Nullable final String ... aParamNames ) { } } | if ( aParamNames != null ) for ( final String sParamName : aParamNames ) addRequiredParam ( sParamName ) ; return this ; |
public class InternalXbaseParser {
	/**
	 * ANTLR-generated entry rule; do not edit by hand.
	 * InternalXbase.g:4896:1: entryRuleXThrowExpression returns [EObject current=null] :
	 *   iv_ruleXThrowExpression= ruleXThrowExpression EOF ;
	 */
	public final EObject entryRuleXThrowExpression() throws RecognitionException {
		EObject current = null;
		EObject iv_ruleXThrowExpression = null;
		try {
			// InternalXbase.g:4896:57: ( iv_ruleXThrowExpression= ruleXThrowExpression EOF )
			// InternalXbase.g:4897:2: iv_ruleXThrowExpression= ruleXThrowExpression EOF
			{
				if (state.backtracking == 0) {
					newCompositeNode(grammarAccess.getXThrowExpressionRule());
				}
				pushFollow(FOLLOW_1);
				iv_ruleXThrowExpression = ruleXThrowExpression();
				state._fsp--;
				if (state.failed) return current;
				if (state.backtracking == 0) {
					current = iv_ruleXThrowExpression;
				}
				match(input, EOF, FOLLOW_2);
				if (state.failed) return current;
			}
		} catch (RecognitionException re) {
			recover(input, re);
			appendSkippedTokens();
		} finally {
		}
		return current;
	}
}
public class ClientVpnEndpoint {
	/**
	 * Information about the authentication method used by the Client VPN endpoint.
	 * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
	 * {@link #setAuthenticationOptions(java.util.Collection)} or
	 * {@link #withAuthenticationOptions(java.util.Collection)} if you want to override the existing values.
	 *
	 * @param authenticationOptions Information about the authentication method used by the Client VPN endpoint.
	 * @return Returns a reference to this object so that method calls can be chained together.
	 */
	public ClientVpnEndpoint withAuthenticationOptions(ClientVpnAuthentication... authenticationOptions) {
		// lazily create the backing list, presized to the varargs length
		if (this.authenticationOptions == null) {
			setAuthenticationOptions(new com.amazonaws.internal.SdkInternalList<ClientVpnAuthentication>(authenticationOptions.length));
		}
		for (ClientVpnAuthentication ele : authenticationOptions) {
			this.authenticationOptions.add(ele);
		}
		return this;
	}
}
public class SelectorDisplayer {
	/**
	 * Renders the current selection rectangle, if a selection is in progress.
	 * The raw selection height may be negative (dragging upwards); the
	 * rectangle is normalised to a non-negative origin and positive extent
	 * before being drawn.
	 */
	@Override
	public void render(Graphic g) {
		if (model.isSelecting()) {
			final Area selectionArea = model.getSelectionArea();
			final int x = (int) viewer.getViewpointX(selectionArea.getX());
			final int w = selectionArea.getWidth();
			int y = (int) viewer.getViewpointY(model.getSelectRawY());
			int h = (int) model.getSelectRawH();
			// dragging upwards: flip to a positive height anchored at the top
			if (h < 0) {
				y += h;
				h = -h;
			}
			// clamp to the top edge of the viewport
			if (y < 0) {
				h += y;
				y = 0;
			}
			if (w > 0 && h > 0) {
				g.setColor(colorSelection);
				g.drawRect(x, y, w, h, false);
			}
		}
	}
}
public class VimGenerator2 {
	/**
	 * Generate the Vim syntax rules for SARL annotations.
	 *
	 * @param it the receiver of the generated elements.
	 */
	protected void generateAnnotations(IStyleAppendable it) {
		// NOTE(review): "annnotation" looks like a typo for "annotation" in the
		// emitted comment text — confirm before changing generated output.
		appendComment(it, "annnotation"); //$NON-NLS-1$
		// match @Name followed by optional dotted/dollar-qualified suffixes
		appendMatch(it, "sarlAnnotation", "@[_a-zA-Z][_0-9a-zA-Z]*\\([.$][_a-zA-Z][_0-9a-zA-Z]*\\)*"); //$NON-NLS-1$ //$NON-NLS-2$
		appendCluster(it, "sarlAnnotation"); //$NON-NLS-1$
		// highlight annotations like pre-processor directives
		hilight("sarlAnnotation", VimSyntaxGroup.PRE_PROCESSOR); //$NON-NLS-1$
		it.newLine();
	}
}
public class JndiChangeNotifier {
	/**
	 * Check if the current value bound to the JNDI key differs from the
	 * previously seen one; if yes, invoke
	 * {@link AddressChangeListener#changeAddresses(List)}.
	 * The cached value is only updated when a listener is present, so without a
	 * listener the same change keeps being reported as an error on every call.
	 */
	public void check() {
		List<InetSocketAddress> newAddrs = getAddresses();
		if (newAddrs != null && !newAddrs.equals(currentAddrs)) {
			if (addressChangeListener != null) {
				currentAddrs = newAddrs;
				addressChangeListener.changeAddresses(newAddrs);
			} else {
				LOGGER.error("Address change listener is null for JNDI key {}, cannot notify about new value {}", getJndiKey(), newAddrs);
			}
		}
	}
}
public class ReflectionUtils {
	/**
	 * Invokes a method reflectively, translating the checked reflection
	 * exceptions into unchecked {@link InvocationException}s; the original
	 * exception is preserved as the cause.
	 *
	 * @param instance The instance (null for static methods, per {@link Method#invoke})
	 * @param method The method
	 * @param arguments The arguments
	 * @param <R> The return type
	 * @param <T> The instance type
	 * @return The result, cast (unchecked) to R
	 */
	public static <R, T> R invokeMethod(T instance, Method method, Object... arguments) {
		try {
			return (R) method.invoke(instance, arguments);
		} catch (IllegalAccessException e) {
			throw new InvocationException("Illegal access invoking method [" + method + "]: " + e.getMessage(), e);
		} catch (InvocationTargetException e) {
			// the target method itself threw; the real exception travels as the cause
			throw new InvocationException("Exception occurred invoking method [" + method + "]: " + e.getMessage(), e);
		}
	}
}
public class ObjectFactory {
	/**
	 * Create an instance of {@link JAXBElement}{@code <}{@link String}{@code >}
	 * for the {@code filter} element scoped to {@link GetFolderTree}.
	 * JAXB-generated factory method.
	 */
	@XmlElementDecl(namespace = "http://docs.oasis-open.org/ns/cmis/messaging/200908/", name = "filter", scope = GetFolderTree.class)
	public JAXBElement<String> createGetFolderTreeFilter(String value) {
		// NOTE(review): reuses _GetPropertiesFilter_QNAME — presumably the same
		// {namespace}filter QName shared across scopes; confirm it stays in sync.
		return new JAXBElement<String>(_GetPropertiesFilter_QNAME, String.class, GetFolderTree.class, value);
	}
}
public class InternalXbaseWithAnnotationsParser {
	/**
	 * ANTLR-generated parser rule; do not edit by hand.
	 * InternalXbaseWithAnnotations.g:1945:1: ruleXCastedExpression returns [EObject current=null] :
	 *   ( this_XPostfixOperation_0= ruleXPostfixOperation
	 *     ( ( ( ( () 'as' ) )=>( () otherlv_2= 'as' ) ) ( (lv_type_3_0= ruleJvmTypeReference ) ) )* ) ;
	 */
	public final EObject ruleXCastedExpression() throws RecognitionException {
		EObject current = null;
		Token otherlv_2 = null;
		EObject this_XPostfixOperation_0 = null;
		EObject lv_type_3_0 = null;
		enterRule();
		try {
			{
				{
					if (state.backtracking == 0) {
						newCompositeNode(grammarAccess.getXCastedExpressionAccess().getXPostfixOperationParserRuleCall_0());
					}
					pushFollow(FOLLOW_29);
					this_XPostfixOperation_0 = ruleXPostfixOperation();
					state._fsp--;
					if (state.failed) return current;
					if (state.backtracking == 0) {
						current = this_XPostfixOperation_0;
						afterParserOrEnumRuleCall();
					}
					// zero-or-more trailing casts: ( 'as' JvmTypeReference )*
					loop33: do {
						int alt33 = 2;
						int LA33_0 = input.LA(1);
						if ((LA33_0 == 49)) {
							// token 49 is presumably the 'as' keyword — confirm against the token table
							int LA33_2 = input.LA(2);
							if ((synpred18_InternalXbaseWithAnnotations())) {
								alt33 = 1;
							}
						}
						switch (alt33) {
							case 1:
								{
									{
										{
											{
												if (state.backtracking == 0) {
													// wrap everything parsed so far as the target of the cast
													current = forceCreateModelElementAndSet(grammarAccess.getXCastedExpressionAccess().getXCastedExpressionTargetAction_1_0_0_0(), current);
												}
											}
											otherlv_2 = (Token) match(input, 49, FOLLOW_22);
											if (state.failed) return current;
											if (state.backtracking == 0) {
												newLeafNode(otherlv_2, grammarAccess.getXCastedExpressionAccess().getAsKeyword_1_0_0_1());
											}
										}
									}
									{
										{
											if (state.backtracking == 0) {
												newCompositeNode(grammarAccess.getXCastedExpressionAccess().getTypeJvmTypeReferenceParserRuleCall_1_1_0());
											}
											pushFollow(FOLLOW_29);
											lv_type_3_0 = ruleJvmTypeReference();
											state._fsp--;
											if (state.failed) return current;
											if (state.backtracking == 0) {
												if (current == null) {
													current = createModelElementForParent(grammarAccess.getXCastedExpressionRule());
												}
												set(current, "type", lv_type_3_0, "org.eclipse.xtext.xbase.Xtype.JvmTypeReference");
												afterParserOrEnumRuleCall();
											}
										}
									}
								}
								break;
							default:
								break loop33;
						}
					} while (true);
				}
			}
			if (state.backtracking == 0) {
				leaveRule();
			}
		} catch (RecognitionException re) {
			recover(input, re);
			appendSkippedTokens();
		} finally {
		}
		return current;
	}
}
public class HttpUtil { /** * Remove headers form header array that match Strings in headersToRemove
* @ param headersToClean Array of { @ link Header } : Headers to clean up
* @ param headersToRemove Header names to remove */
private static Header [ ] removeHeaders ( Header [ ] headersToClean , String [ ] headersToRemove ) { } } | ArrayList < Header > headers = new ArrayList < > ( ) ; if ( headersToClean == null ) { return null ; } for ( Header header : headersToClean ) { if ( ! StringUtils . equalsAnyIgnoreCase ( header . getName ( ) , headersToRemove ) ) { headers . add ( header ) ; } } LOG . debug ( "Removed the content-length and content-type headers as the HTTP Client lib will care " + "about them as soon as the entity is set on the POST object." ) ; Header [ ] headersArray = new Header [ headers . size ( ) ] ; headersArray = headers . toArray ( headersArray ) ; return headersArray ; |
public class ArcX4 { /** * Weights are updated as 1 + coef * errors < sup > expo < / sup > . This sets the
* coefficient used to update the errors
* @ param coef the multiplicative factor on the errors in weight construction */
public void setCoefficient ( double coef ) { } } | if ( coef <= 0 || Double . isInfinite ( coef ) || Double . isNaN ( coef ) ) throw new ArithmeticException ( "The coefficient must be a positive constant" ) ; this . coef = coef ; |
public class RegistriesInner {
	/**
	 * Lists the policies for the specified container registry.
	 * Blocking convenience wrapper around the async variant.
	 *
	 * @param resourceGroupName The name of the resource group to which the container registry belongs.
	 * @param registryName The name of the container registry.
	 * @throws IllegalArgumentException thrown if parameters fail the validation
	 * @throws CloudException thrown if the request is rejected by server
	 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
	 * @return the RegistryPoliciesInner object if successful.
	 */
	public RegistryPoliciesInner listPolicies(String resourceGroupName, String registryName) {
		return listPoliciesWithServiceResponseAsync(resourceGroupName, registryName).toBlocking().single().body();
	}
}
public class Termination { /** * The countries to which calls are allowed .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setCallingRegions ( java . util . Collection ) } or { @ link # withCallingRegions ( java . util . Collection ) } if you want
* to override the existing values .
* @ param callingRegions
* The countries to which calls are allowed .
* @ return Returns a reference to this object so that method calls can be chained together . */
public Termination withCallingRegions ( String ... callingRegions ) { } } | if ( this . callingRegions == null ) { setCallingRegions ( new java . util . ArrayList < String > ( callingRegions . length ) ) ; } for ( String ele : callingRegions ) { this . callingRegions . add ( ele ) ; } return this ; |
public class FctBnCnvBnFromRs {
	/**
	 * <p>Get CnvBnRsToEnum (create and put into map).</p>
	 *
	 * @return requested CnvBnRsToEnum
	 * @throws Exception - an exception
	 */
	protected final CnvBnRsToEnum<RS> createPutCnvBnRsToEnum() throws Exception {
		CnvBnRsToEnum<RS> convrt = new CnvBnRsToEnum<RS>();
		// assigning fully initialized object, keyed by its simple class name:
		this.convertersMap.put(CnvBnRsToEnum.class.getSimpleName(), convrt);
		return convrt;
	}
}
public class CentralDogmaEndpointGroup {
	/**
	 * Creates a new {@link CentralDogmaEndpointGroup} that watches the given
	 * file in Central Dogma and decodes its contents into endpoints.
	 *
	 * @param centralDogma a {@link CentralDogma}
	 * @param projectName a Central Dogma project name
	 * @param repositoryName a Central Dogma repository name
	 * @param query a {@link Query} to route file
	 * @param endpointListDecoder an {@link EndpointListDecoder}
	 */
	public static <T> CentralDogmaEndpointGroup<T> of(CentralDogma centralDogma, String projectName, String repositoryName, Query<T> query, EndpointListDecoder<T> endpointListDecoder) {
		// delegate to the watcher-based factory with a file watcher on the query
		return ofWatcher(centralDogma.fileWatcher(projectName, repositoryName, query), endpointListDecoder);
	}
}
public class NutMongoDbStarter {
	/** Obtains the MongoClient instance (exposed as the "mongoClient" IoC bean). */
	@SuppressWarnings("unchecked")
	@IocBean(name = "mongoClient")
	public MongoClient createMongoClient() {
		// If a connection URI is configured, build the client from it directly.
		if (conf.containsKey(PROP_URI)) {
			return new MongoClient(new MongoClientURI(conf.get(PROP_URI)));
		}
		// Otherwise assemble the client from the server-address list, credential list
		// and client-options beans registered in the IoC container.
		return new MongoClient(ioc.get(List.class, "mongodbServerAddressList"), ioc.get(List.class, "mongodbCredentialList"), ioc.get(MongoClientOptions.class));
	}
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.