signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class RespondDecisionTaskCompletedRequest {
    /**
     * The list of decisions (possibly empty) made by the decider while processing this decision
     * task. See the docs for the <a>Decision</a> structure for details.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setDecisions(java.util.Collection)} or {@link #withDecisions(java.util.Collection)}
     * if you want to override the existing values.
     *
     * @param decisions
     *        The list of decisions (possibly empty) made by the decider while processing this
     *        decision task. See the docs for the <a>Decision</a> structure for details.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RespondDecisionTaskCompletedRequest withDecisions(Decision... decisions) {
        // Lazily create the backing list, presized to the varargs length.
        if (this.decisions == null) {
            setDecisions(new java.util.ArrayList<Decision>(decisions.length));
        }
        // Append rather than replace: values are added to whatever list already exists.
        for (Decision ele : decisions) {
            this.decisions.add(ele);
        }
        return this;
    }
}
public class ExchangeRate {
    /**
     * Retrieves the exchange rates from the given currency to every supported currency.
     *
     * @param currency the currency code to retrieve rates for
     * @return the retrieved exchange rate object
     * @throws StripeException if the API request fails
     */
    public static ExchangeRate retrieve(String currency) throws StripeException {
        // Delegate to the full overload with no extra params and default request options.
        return retrieve(currency, (Map<String, Object>) null, (RequestOptions) null);
    }
}
public class VirtualNetworkTapsInner {
    /**
     * Updates an VirtualNetworkTap tags.
     *
     * @param resourceGroupName The name of the resource group.
     * @param tapName The name of the tap.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<VirtualNetworkTapInner> updateTagsAsync(String resourceGroupName, String tapName) {
        // Unwrap the ServiceResponse envelope so subscribers see only the body.
        return updateTagsWithServiceResponseAsync(resourceGroupName, tapName)
            .map(new Func1<ServiceResponse<VirtualNetworkTapInner>, VirtualNetworkTapInner>() {
                @Override
                public VirtualNetworkTapInner call(ServiceResponse<VirtualNetworkTapInner> response) {
                    return response.body();
                }
            });
    }
}
public class DocBookBuildUtilities {
    /**
     * Convert a DOM Document to a Formatted String representation.
     *
     * @param doc The DOM Document to be converted and formatted.
     * @param xmlFormatProperties The XML Formatting Properties.
     * @return The converted XML String representation.
     */
    public static String convertDocumentToFormattedString(final Document doc, final XMLFormatProperties xmlFormatProperties) {
        // Delegates to the three-argument overload; the trailing flag's meaning is
        // defined by that overload (not visible from this block) — confirm before relying on it.
        return convertDocumentToFormattedString(doc, xmlFormatProperties, true);
    }
}
public class ExtensionLoader {
    /**
     * Gets the extension instance registered under an alias.
     *
     * @param alias the extension alias
     * @return the extension instance (singleton caching is applied when configured)
     * @throws SofaRpcRuntimeException if no extension is registered under the alias
     */
    public T getExtension(String alias) {
        ExtensionClass<T> extensionClass = getExtensionClass(alias);
        if (extensionClass == null) {
            throw new SofaRpcRuntimeException("Not found extension of " + interfaceName + " named: \"" + alias + "\"!");
        } else {
            if (extensible.singleton() && factory != null) {
                // Singleton mode: double-checked lookup in the instance cache so the
                // instance is created at most once even under concurrent callers.
                T t = factory.get(alias);
                if (t == null) {
                    synchronized (this) {
                        t = factory.get(alias);
                        if (t == null) {
                            t = extensionClass.getExtInstance();
                            factory.put(alias, t);
                        }
                    }
                }
                return t;
            } else {
                // Non-singleton (or no cache available): build a fresh instance every call.
                return extensionClass.getExtInstance();
            }
        }
    }
}
public class GitFlowHotfixFinishMojo {
    /** {@inheritDoc} */
    @Override
    public void execute() throws MojoExecutionException, MojoFailureException {
        // Finishes a gitflow hotfix: resolves the hotfix branch, merges it into the
        // appropriate target branches (support/production/release/develop), tags,
        // optionally builds/pushes, and deletes the hotfix branch.
        validateConfiguration(preHotfixGoals, postHotfixGoals);
        try {
            // check uncommitted changes
            checkUncommittedChanges();

            // Resolve which hotfix branch to finish: prompt interactively, or derive
            // from the configured hotfixVersion.
            String hotfixBranchName = null;
            if (settings.isInteractiveMode()) {
                hotfixBranchName = promptBranchName();
            } else if (StringUtils.isNotBlank(hotfixVersion)) {
                final String branch = gitFlowConfig.getHotfixBranchPrefix() + hotfixVersion;
                if (!gitCheckBranchExists(branch)) {
                    throw new MojoFailureException("Hotfix branch with name '" + branch + "' doesn't exist. Cannot finish hotfix.");
                }
                hotfixBranchName = branch;
            }
            if (StringUtils.isBlank(hotfixBranchName)) {
                throw new MojoFailureException("Hotfix branch name to finish is blank.");
            }

            // support branch hotfix: a hotfix branched off a support branch merges back
            // into that support branch instead of production/develop.
            String supportBranchName = null;
            boolean supportHotfix = hotfixBranchName.startsWith(gitFlowConfig.getHotfixBranchPrefix() + gitFlowConfig.getSupportBranchPrefix());
            // get support branch name w/o version part
            if (supportHotfix) {
                supportBranchName = hotfixBranchName.substring(gitFlowConfig.getHotfixBranchPrefix().length());
                supportBranchName = supportBranchName.substring(0, supportBranchName.lastIndexOf('/'));
            }

            // fetch and check remote
            if (fetchRemote) {
                gitFetchRemoteAndCompare(hotfixBranchName);
                if (supportBranchName != null) {
                    gitFetchRemoteAndCompare(supportBranchName);
                } else {
                    if (notSameProdDevName()) {
                        gitFetchRemoteAndCreate(gitFlowConfig.getDevelopmentBranch());
                        gitFetchRemoteAndCompare(gitFlowConfig.getDevelopmentBranch());
                    }
                    gitFetchRemoteAndCreate(gitFlowConfig.getProductionBranch());
                    gitFetchRemoteAndCompare(gitFlowConfig.getProductionBranch());
                }
            }

            // git checkout hotfix/...
            gitCheckout(hotfixBranchName);
            if (!skipTestProject) {
                // mvn clean test
                mvnCleanTest();
            }
            // maven goals before merge
            if (StringUtils.isNotBlank(preHotfixGoals)) {
                mvnRun(preHotfixGoals);
            }

            // When working with -SNAPSHOT hotfix versions, commit a de-snapshotted
            // version before merging so the merged history carries the release version.
            String currentHotfixVersion = getCurrentProjectVersion();
            if (useSnapshotInHotfix && ArtifactUtils.isSnapshot(currentHotfixVersion)) {
                String commitVersion = currentHotfixVersion.replace("-" + Artifact.SNAPSHOT_VERSION, "");
                mvnSetVersions(commitVersion);
                Map<String, String> properties = new HashMap<String, String>();
                properties.put("version", commitVersion);
                gitCommit(commitMessages.getHotfixFinishMessage(), properties);
            }

            // Merge target: support branch if this is a support hotfix, otherwise production.
            if (supportBranchName != null) {
                gitCheckout(supportBranchName);
                // git merge --no-ff hotfix/...
                gitMergeNoff(hotfixBranchName);
            } else if (!skipMergeProdBranch) {
                // git checkout master
                gitCheckout(gitFlowConfig.getProductionBranch());
                // git merge --no-ff hotfix/...
                gitMergeNoff(hotfixBranchName);
            }

            final String currentVersion = getCurrentProjectVersion();
            if (!skipTag) {
                String tagVersion = currentVersion;
                if ((tychoBuild || useSnapshotInHotfix) && ArtifactUtils.isSnapshot(tagVersion)) {
                    tagVersion = tagVersion.replace("-" + Artifact.SNAPSHOT_VERSION, "");
                }
                Map<String, String> properties = new HashMap<String, String>();
                properties.put("version", tagVersion);
                // git tag -a ...
                gitTag(gitFlowConfig.getVersionTagPrefix() + tagVersion, commitMessages.getTagHotfixMessage(), gpgSignTag, properties);
            }

            if (skipMergeProdBranch && (supportBranchName == null)) {
                // switch to production branch so hotfix branch can be deleted
                gitCheckout(gitFlowConfig.getProductionBranch());
            }

            // maven goals after merge
            if (StringUtils.isNotBlank(postHotfixGoals)) {
                mvnRun(postHotfixGoals);
            }

            // check whether release branch exists
            // git for-each-ref --count=1 --format="%(refname:short)" refs/heads/release/*
            final String releaseBranch = gitFindBranches(gitFlowConfig.getReleaseBranchPrefix(), true);

            if (supportBranchName == null) {
                // if release branch exists merge hotfix changes into it
                if (StringUtils.isNotBlank(releaseBranch)) {
                    // git checkout release
                    gitCheckout(releaseBranch);
                    String releaseBranchVersion = getCurrentProjectVersion();
                    if (!currentVersion.equals(releaseBranchVersion)) {
                        // set version to avoid merge conflict
                        mvnSetVersions(currentVersion);
                        gitCommit(commitMessages.getUpdateReleaseToAvoidConflictsMessage());
                    }
                    // git merge --no-ff hotfix/...
                    gitMergeNoff(hotfixBranchName);
                    if (!currentVersion.equals(releaseBranchVersion)) {
                        // restore the release branch's own version after the merge
                        mvnSetVersions(releaseBranchVersion);
                        gitCommit(commitMessages.getUpdateReleaseBackPreMergeStateMessage());
                    }
                } else if (!skipMergeDevBranch) {
                    GitFlowVersionInfo developVersionInfo = new GitFlowVersionInfo(currentVersion);
                    if (notSameProdDevName()) {
                        // git checkout develop
                        gitCheckout(gitFlowConfig.getDevelopmentBranch());
                        developVersionInfo = new GitFlowVersionInfo(getCurrentProjectVersion());
                        // set version to avoid merge conflict
                        mvnSetVersions(currentVersion);
                        gitCommit(commitMessages.getHotfixVersionUpdateMessage());
                        // git merge --no-ff hotfix/...
                        gitMergeNoff(hotfixBranchName);
                        // which version to increment
                        GitFlowVersionInfo hotfixVersionInfo = new GitFlowVersionInfo(currentVersion);
                        if (developVersionInfo.compareTo(hotfixVersionInfo) < 0) {
                            developVersionInfo = hotfixVersionInfo;
                        }
                    }
                    // get next snapshot version
                    final String nextSnapshotVersion = developVersionInfo.getSnapshotVersionString();
                    if (StringUtils.isBlank(nextSnapshotVersion)) {
                        throw new MojoFailureException("Next snapshot version is blank.");
                    }
                    // mvn versions:set -DnewVersion=... -DgenerateBackupPoms=false
                    mvnSetVersions(nextSnapshotVersion);
                    Map<String, String> properties = new HashMap<String, String>();
                    properties.put("version", nextSnapshotVersion);
                    // git commit -a -m updating for next development version
                    gitCommit(commitMessages.getHotfixFinishMessage(), properties);
                }
            }

            if (installProject) {
                // mvn clean install
                mvnCleanInstall();
            }

            if (pushRemote) {
                if (supportBranchName != null) {
                    gitPush(supportBranchName, !skipTag);
                } else {
                    gitPush(gitFlowConfig.getProductionBranch(), !skipTag);
                    if (StringUtils.isNotBlank(releaseBranch)) {
                        gitPush(releaseBranch, !skipTag);
                    } else if (StringUtils.isBlank(releaseBranch) && notSameProdDevName()) {
                        // if no release branch
                        gitPush(gitFlowConfig.getDevelopmentBranch(), !skipTag);
                    }
                }
                if (!keepBranch) {
                    gitPushDelete(hotfixBranchName);
                }
            }

            if (!keepBranch) {
                if (skipMergeProdBranch) {
                    // force delete as upstream merge is skipped
                    gitBranchDeleteForce(hotfixBranchName);
                } else {
                    // git branch -d hotfix/...
                    gitBranchDelete(hotfixBranchName);
                }
            }
        } catch (Exception e) {
            // Wrap everything as a MojoFailureException, preserving the cause.
            throw new MojoFailureException("hotfix-finish", e);
        }
    }
}
public class CommerceDiscountLocalServiceBaseImpl {
    /**
     * Performs a dynamic query on the database and returns the matching rows.
     *
     * @param dynamicQuery the dynamic query
     * @return the matching rows
     */
    @Override
    public <T> List<T> dynamicQuery(DynamicQuery dynamicQuery) {
        // Delegate straight to the persistence layer.
        return commerceDiscountPersistence.findWithDynamicQuery(dynamicQuery);
    }
}
public class BitsUtil {
    /**
     * Compute the Hamming distance (size of symmetric difference), i.e.
     * {@code cardinality(a ^ b)}. Vectors of unequal length are handled by
     * treating the missing words of the shorter vector as zero.
     *
     * @param x First vector
     * @param y Second vector
     * @return Cardinality of symmetric difference
     */
    public static int hammingDistance(long[] x, long[] y) {
        final int shared = Math.min(x.length, y.length);
        int distance = 0;
        int pos = 0;
        // Words present in both vectors: count differing bits via XOR.
        while (pos < shared) {
            distance += Long.bitCount(x[pos] ^ y[pos]);
            pos++;
        }
        // Trailing words of the longer vector differ from implicit zeros,
        // so every set bit contributes to the distance.
        while (pos < x.length) {
            distance += Long.bitCount(x[pos]);
            pos++;
        }
        while (pos < y.length) {
            distance += Long.bitCount(y[pos]);
            pos++;
        }
        return distance;
    }
}
public class FrameworkMethod {
    /**
     * Adds to {@code errors} if this method:
     * <ul>
     * <li>is not public, or
     * <li>returns something other than void, or
     * <li>is static (given {@code isStatic is false}), or
     * <li>is not static (given {@code isStatic is true}).
     * </ul>
     */
    public void validatePublicVoid(boolean isStatic, List<Throwable> errors) {
        // Each failed check appends its own Exception so callers see every
        // problem with the method at once rather than the first one only.
        if (isStatic() != isStatic) {
            String state = isStatic ? "should" : "should not";
            errors.add(new Exception("Method " + method.getName() + "() " + state + " be static"));
        }
        if (!isPublic()) {
            errors.add(new Exception("Method " + method.getName() + "() should be public"));
        }
        if (method.getReturnType() != Void.TYPE) {
            errors.add(new Exception("Method " + method.getName() + "() should be void"));
        }
    }
}
public class Animation {
    /**
     * Start the animation.
     *
     * Changing properties once the animation is running can have unpredictable
     * results.
     *
     * @param engine
     *            The global animation engine.
     * @return {@code this}, so you can save the instance at the end of a chain
     *         of calls.
     */
    public Animation start(GVRAnimationEngine engine) {
        // Hand the wrapped GVRAnimation to the engine, then mark this wrapper running.
        ((GVRAnimation) getAnimation()).start(engine);
        mIsRunning = true;
        return this;
    }
}
public class EllipseClustersIntoHexagonalGrid {
    /**
     * Selects the closest node with the assumption that it's along the side of the grid.
     * Returns null when no shared unmarked neighbor passes the distance-ratio and
     * angle checks.
     */
    static NodeInfo selectClosestSide(NodeInfo a, NodeInfo b) {
        // 1.7321 ~= sqrt(3): expected ratio of the two distances in an
        // undistorted hexagonal grid — TODO confirm against the grid geometry docs.
        double ratio = 1.7321;
        NodeInfo best = null;
        double bestDistance = Double.MAX_VALUE;
        Edge bestEdgeA = null;
        Edge bestEdgeB = null;
        // Search for a common, unmarked neighbor of both a and b.
        for (int i = 0; i < a.edges.size; i++) {
            NodeInfo aa = a.edges.get(i).target;
            if (aa.marked)
                continue;
            for (int j = 0; j < b.edges.size; j++) {
                NodeInfo bb = b.edges.get(j).target;
                if (bb.marked)
                    continue;
                if (aa == bb) {
                    double da = EllipsesIntoClusters.axisAdjustedDistanceSq(a.ellipse, aa.ellipse);
                    double db = EllipsesIntoClusters.axisAdjustedDistanceSq(b.ellipse, aa.ellipse);
                    da = Math.sqrt(da);
                    db = Math.sqrt(db);
                    double max, min;
                    if (da > db) {
                        max = da;
                        min = db;
                    } else {
                        max = db;
                        min = da;
                    }
                    // see how much it deviates from the ideal length with no distortion
                    double diffRatio = Math.abs(max - min * ratio) / max;
                    if (diffRatio > 0.25)
                        continue;
                    // TODO reject if too far
                    double d = da + db;
                    if (d < bestDistance) {
                        bestDistance = d;
                        best = aa;
                        bestEdgeA = a.edges.get(i);
                        bestEdgeB = b.edges.get(j);
                    }
                    break;
                }
            }
        }
        // check the angles
        if (best != null) {
            double angleA = UtilAngle.distanceCW(bestEdgeA.angle, bestEdgeB.angle);
            if (angleA < Math.PI * 0.25) // expected with zero distortion is 30 degrees
                return best;
            else
                return null;
        }
        return null;
    }
}
public class TagMagix {
    /**
     * get (and cache) a regex Pattern for locating an HTML attribute value
     * within a particular tag. if found, the pattern will have the attribute
     * value in group 1. Note that the attribute value may contain surrounding
     * apostrophe (') or quote (") characters.
     *
     * @param tagName name of the HTML tag to match
     * @param attrName name of the attribute whose value is captured
     * @return Pattern to match the tag-attribute's value
     */
    private synchronized static Pattern getPattern(String tagName, String attrName) {
        // Compiled patterns are cached per (tag, attribute) pair since Pattern
        // compilation is relatively expensive; synchronized guards the cache map.
        String key = tagName + " " + attrName;
        Pattern pc = pcPatterns.get(key);
        if (pc == null) {
            String tagPatString = "<\\s*" + tagName + "\\s+[^>]*\\b" + attrName + "\\s*=\\s*(" + ANY_ATTR_VALUE + ")(?:\\s|>)?";
            pc = Pattern.compile(tagPatString, Pattern.CASE_INSENSITIVE);
            pcPatterns.put(key, pc);
        }
        return pc;
    }
}
public class BaseRichMediaStudioCreative {
    /**
     * Gets the artworkType value for this BaseRichMediaStudioCreative.
     *
     * @return artworkType The type of artwork used in this creative. This attribute is
     *         read-only.
     */
    public com.google.api.ads.admanager.axis.v201902.RichMediaStudioCreativeArtworkType getArtworkType() {
        return artworkType;
    }
}
public class MapTileCollisionComputer {
    /**
     * Iteratively steps a movement from (sh, sv) by (sx, sy) up to {@code max} times,
     * probing for collisions on the horizontal and vertical axes separately and
     * returning the last collision found (or null).
     *
     * CHECKSTYLE IGNORE LINE: ExecutableStatementCount|CyclomaticComplexity|NPathComplexity
     */
    private CollisionResult computeCollision(CollisionCategory category, double sh, double sv, double sx, double sy, int max) {
        double x = sh;
        double y = sv;
        // (ox, oy) track the previous accepted position used as the probe origin.
        double ox = x;
        double oy = y;
        boolean collX = false;
        boolean collY = false;
        CollisionResult last = null;
        for (int cur = 0; cur < max; cur++) {
            // Horizontal phase: probe, and if a collision constrains X, snap to it.
            CollisionResult current = computeCollision(category, ox, oy, x, y);
            if (current != null) {
                last = current;
                if (current.getX() != null) {
                    x = current.getX().doubleValue();
                    collX = true;
                } else {
                    collX = false;
                }
                if (current.getY() != null) {
                    y = current.getY().doubleValue();
                    oy = y;
                }
            } else {
                collX = false;
            }
            // No X collision: advance horizontally by one step.
            if (!collX) {
                ox = x;
                x += sx;
            }
            // Vertical phase: probe again, and if a collision constrains Y, snap to it.
            current = computeCollision(category, ox, oy, x, y);
            if (current != null) {
                last = current;
                if (current.getX() != null) {
                    x = current.getX().doubleValue();
                }
                if (current.getY() != null) {
                    y = current.getY().doubleValue();
                    collY = true;
                } else {
                    collY = false;
                }
            } else {
                collY = false;
            }
            // No Y collision: advance vertically by one step.
            if (!collY) {
                oy = y;
                y += sy;
            }
        }
        // Glue categories re-probe slightly below the final position so the mover
        // "sticks" to a surface it is resting on — NOTE(review): confirm the 1/2
        // pixel offsets against the glue semantics.
        if (category.isGlue()) {
            if (last == null) {
                last = computeCollision(category, ox, oy, x, y - 1);
            }
            if (last == null) {
                last = computeCollision(category, ox, oy, x, y - 2);
            }
        }
        return last;
    }
}
public class JsonWriter {
    /**
     * Write a UTF escape sequence ({@code \}{@code uXXXX}) using either a one or two-byte sequence.
     *
     * @param out the output
     * @param charToEscape the character to escape.
     */
    private static void escape(OutputAccessor out, int charToEscape) {
        out.write('\\');
        out.write('u');
        if (charToEscape > 0xFF) {
            // High byte is non-zero: emit its two hex digits, then keep only the low byte.
            int hi = (charToEscape >> 8) & 0xFF;
            out.write(HEX_BYTES[hi >> 4]);
            out.write(HEX_BYTES[hi & 0xF]);
            charToEscape &= 0xFF;
        } else {
            // High byte is zero: pad with "00" to keep the escape four digits wide.
            out.write((byte) '0');
            out.write((byte) '0');
        }
        // We know it's a control char, so only the last 2 chars are non-0
        out.write(HEX_BYTES[charToEscape >> 4]);
        out.write(HEX_BYTES[charToEscape & 0xF]);
    }
}
public class DefaultContentHandler {
    /**
     * Sets the base directory. If the new base directory is different from
     * the current one and <code>autoDelete</code> is true, the current
     * base directory's will be deleted. The new base directory need not exist
     * yet. If it doesn't exist, it and all parent directories will be created
     * the first time it's needed.
     *
     * @param baseDir the new value, never <code>null</code>.
     * @throws NullPointerException if the value is null.
     */
    public void setBaseDir(File baseDir) {
        // NOTE(review): comparison uses reference identity (!=), not equals(), so an
        // equal-but-distinct File triggers deletion — confirm this is intended.
        if (this.baseDir != null && this.baseDir != baseDir && autoDelete) {
            rmDir(this.baseDir);
        }
        this.baseDir = baseDir;
    }
}
public class BranchFilterModule {
    /**
     * Get all topicrefs under the given root: elements matching MAP_TOPICREF
     * whose format is a DITA format and whose scope is not external.
     *
     * @param root element whose subtree is scanned
     * @return matching topicref elements, in document order
     */
    private List<Element> getTopicrefs(final Element root) {
        final List<Element> res = new ArrayList<>();
        final NodeList all = root.getElementsByTagName("*");
        for (int i = 0; i < all.getLength(); i++) {
            final Element elem = (Element) all.item(i);
            if (MAP_TOPICREF.matches(elem)
                    && isDitaFormat(elem.getAttributeNode(ATTRIBUTE_NAME_FORMAT))
                    && !elem.getAttribute(ATTRIBUTE_NAME_SCOPE).equals(ATTR_SCOPE_VALUE_EXTERNAL)) {
                res.add(elem);
            }
        }
        return res;
    }
}
public class Metrics {
    /**
     * Measures the time taken for short tasks and the count of these tasks.
     *
     * @param name The base metric name
     * @param tags MUST be an even number of arguments representing key/value pairs of tags.
     * @return A new or existing timer.
     */
    public static Timer timer(String name, String... tags) {
        // Delegate to the shared global registry.
        return globalRegistry.timer(name, tags);
    }
}
public class DescribeConfigurationAggregatorsRequest {
    /**
     * The name of the configuration aggregators.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setConfigurationAggregatorNames(java.util.Collection)} or
     * {@link #withConfigurationAggregatorNames(java.util.Collection)} if you want to override the existing values.
     *
     * @param configurationAggregatorNames
     *        The name of the configuration aggregators.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeConfigurationAggregatorsRequest withConfigurationAggregatorNames(String... configurationAggregatorNames) {
        // Lazily create the backing SDK list, presized to the varargs length.
        if (this.configurationAggregatorNames == null) {
            setConfigurationAggregatorNames(new com.amazonaws.internal.SdkInternalList<String>(configurationAggregatorNames.length));
        }
        // Append rather than replace existing values.
        for (String ele : configurationAggregatorNames) {
            this.configurationAggregatorNames.add(ele);
        }
        return this;
    }
}
public class AWSShieldClient {
    /**
     * Returns all ongoing DDoS attacks or all DDoS attacks during a specified time period.
     *
     * @param request the ListAttacks request
     * @return Result of the ListAttacks operation returned by the service.
     * @throws InternalErrorException
     *         Exception that indicates that a problem occurred with the service infrastructure. You can retry the
     *         request.
     * @throws InvalidParameterException
     *         Exception that indicates that the parameters passed to the API are invalid.
     * @throws InvalidOperationException
     *         Exception that indicates that the operation would not cause any change to occur.
     * @sample AWSShield.ListAttacks
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/shield-2016-06-02/ListAttacks" target="_top">AWS API
     *      Documentation</a>
     */
    @Override
    public ListAttacksResult listAttacks(ListAttacksRequest request) {
        // Run client-side hooks (e.g. request handlers) before executing the call.
        request = beforeClientExecution(request);
        return executeListAttacks(request);
    }
}
public class MatrixVectorReader {
    /**
     * Reads in the size of an array matrix. Skips initial comments.
     *
     * @return the parsed matrix size (rows, columns, and total entry count)
     * @throws IOException if reading the underlying stream fails
     */
    public MatrixSize readArraySize() throws IOException {
        // NOTE(review): numRows * numColumns is int arithmetic and can overflow for
        // very large matrices — confirm expected input sizes.
        int numRows = getInt(), numColumns = getInt();
        return new MatrixSize(numRows, numColumns, numRows * numColumns);
    }
}
public class JMSService {
    /**
     * Creates a message.
     *
     * @param text text data
     * @param sender Sender client ID.
     * @param recipients Comma-delimited list of recipient client IDs
     * @return Message
     * @throws JMSException if error thrown from creation of object message
     */
    public Message createTextMessage(String text, String sender, String recipients) throws JMSException {
        // Build the text message on the current session, then stamp sender/recipient
        // metadata via decorateMessage.
        return decorateMessage(getSession().createTextMessage(text), sender, recipients);
    }
}
public class BaseFunction {
    /**
     * Returns a {@link DoubleVector} that is equal to {@code c - v}. This
     * method is used instead of the one in {@link VectorMath} so that a {@link
     * DenseDynamicMagnitudeVector} can be used to represent the difference.
     * This vector type is optimized for when many calls to magnitude are
     * interleaved with updates to a few dimensions in the vector.
     *
     * @param c the vector subtracted from (e.g. a centroid)
     * @param v the vector to subtract
     * @return a new vector holding {@code c - v}
     */
    protected static DoubleVector subtract(DoubleVector c, DoubleVector v) {
        DoubleVector newCentroid = new DenseDynamicMagnitudeVector(c.length());
        // Special case sparse double vectors so that we don't incur a possibly
        // log n get operation for each zero value, as that's the common case
        // for CompactSparseVector.
        if (v instanceof SparseDoubleVector) {
            SparseDoubleVector sv = (SparseDoubleVector) v;
            int[] nonZeros = sv.getNonZeroIndices();
            int sparseIndex = 0;
            // Walk c's dimensions once, consuming v's non-zero indices in step.
            for (int i = 0; i < c.length(); ++i) {
                double value = c.get(i);
                if (sparseIndex < nonZeros.length && i == nonZeros[sparseIndex])
                    value -= sv.get(nonZeros[sparseIndex++]);
                newCentroid.set(i, value);
            }
        } else
            // Dense fallback: element-wise difference.
            for (int i = 0; i < c.length(); ++i)
                newCentroid.set(i, c.get(i) - v.get(i));
        return newCentroid;
    }
}
public class ResetSnapshotAttributeRequest {
    /**
     * This method is intended for internal use only. Returns the marshaled request configured with additional
     * parameters to enable operation dry-run.
     */
    @Override
    public Request<ResetSnapshotAttributeRequest> getDryRunRequest() {
        Request<ResetSnapshotAttributeRequest> request = new ResetSnapshotAttributeRequestMarshaller().marshall(this);
        // DryRun=true asks the service to validate the request without executing it.
        request.addParameter("DryRun", Boolean.toString(true));
        return request;
    }
}
public class UsersModule {
    /**
     * Edits the local (contact) name of a user: loads the user's access hash,
     * sends the rename request to the API, then applies the resulting sequence
     * update locally.
     *
     * @param uid the user id to rename
     * @param name the new local name
     * @return a promise completing when the update has been applied
     */
    public Promise<Void> editName(final int uid, final String name) {
        return getUsersStorage().getValueAsync(uid)
            .map(User::getAccessHash)
            .flatMap(aLong -> api(new RequestEditUserLocalName(uid, aLong, name)))
            .flatMap(responseSeq -> updates().applyUpdate(responseSeq.getSeq(), responseSeq.getState(),
                new UpdateUserLocalNameChanged(uid, name)));
    }
}
public class CmsHtmlImportDialog {
    /**
     * This function fills the <code>{@link CmsHtmlImport}</code> Object based on
     * the values in the import/export configuration file.<p>
     */
    protected void fillHtmlImport() {
        // Copy every setting from the configured extended-import defaults onto
        // the m_htmlimport instance, field by field.
        CmsExtendedHtmlImportDefault extimport = OpenCms.getImportExportManager().getExtendedHtmlImportDefault();
        m_htmlimport.setDestinationDir(extimport.getDestinationDir());
        m_htmlimport.setInputDir(extimport.getInputDir());
        m_htmlimport.setDownloadGallery(extimport.getDownloadGallery());
        m_htmlimport.setImageGallery(extimport.getImageGallery());
        m_htmlimport.setLinkGallery(extimport.getLinkGallery());
        m_htmlimport.setTemplate(extimport.getTemplate());
        m_htmlimport.setElement(extimport.getElement());
        m_htmlimport.setLocale(extimport.getLocale());
        m_htmlimport.setInputEncoding(extimport.getEncoding());
        m_htmlimport.setStartPattern(extimport.getStartPattern());
        m_htmlimport.setEndPattern(extimport.getEndPattern());
        // The last two defaults are stored as strings; convert to boolean here.
        m_htmlimport.setOverwrite(Boolean.valueOf(extimport.getOverwrite()).booleanValue());
        m_htmlimport.setKeepBrokenLinks(Boolean.valueOf(extimport.getKeepBrokenLinks()).booleanValue());
    }
}
public class SearchPlaces {
    /**
     * Usage: java twitter4j.examples.geo.SearchPlaces [ip address] or [latitude] [longitude]
     *
     * @param args either a single IP address, or a latitude/longitude pair
     */
    public static void main(String[] args) {
        if (args.length < 1) {
            System.out.println("Usage: java twitter4j.examples.geo.SearchPlaces [ip address] or [latitude] [longitude]");
            System.exit(-1);
        }
        try {
            Twitter twitter = new TwitterFactory().getInstance();
            // Two args => lat/long query; one arg => IP-based query.
            GeoQuery query;
            if (args.length == 2) {
                query = new GeoQuery(new GeoLocation(Double.parseDouble(args[0]), Double.parseDouble(args[1])));
            } else {
                query = new GeoQuery(args[0]);
            }
            ResponseList<Place> places = twitter.searchPlaces(query);
            if (places.size() == 0) {
                System.out.println("No location associated with the specified IP address or lat/lang");
            } else {
                // Print each place, plus any enclosing ("contained within") places.
                for (Place place : places) {
                    System.out.println("id: " + place.getId() + " name: " + place.getFullName());
                    Place[] containedWithinArray = place.getContainedWithIn();
                    if (containedWithinArray != null && containedWithinArray.length != 0) {
                        System.out.println("  contained within:");
                        for (Place containedWithinPlace : containedWithinArray) {
                            System.out.println("  id: " + containedWithinPlace.getId() + " name: " + containedWithinPlace.getFullName());
                        }
                    }
                }
            }
            System.exit(0);
        } catch (TwitterException te) {
            te.printStackTrace();
            System.out.println("Failed to retrieve places: " + te.getMessage());
            System.exit(-1);
        }
    }
}
public class PmiModuleConfig { /** * Add PmiDataInfo for a statistic ( WebSphere internal use only ) */
public synchronized void addData ( PmiDataInfo info ) { } } | if ( info != null ) perfData . put ( new Integer ( info . getId ( ) ) , info ) ; // System . out . println ( " & & & & & Adding " + info . getName ( ) + " to " + this . getShortName ( ) ) ;
// System . out . println ( " perfData . values ( ) = " + perfData . values ( ) ) ; |
public class LookoutSubscriber {
    /**
     * Create an RpcServerLookoutModel from an RPC request/response pair.
     *
     * @param request the inbound server-side request
     * @param response the outbound response; may be null (treated as failure)
     * @return the populated metrics model
     */
    private RpcServerLookoutModel createServerMetricsModel(SofaRequest request, SofaResponse response) {
        RpcServerLookoutModel rpcServerMetricsModel = new RpcServerLookoutModel();
        RpcInternalContext context = RpcInternalContext.getContext();
        String app = request.getTargetAppName();
        String service = request.getTargetServiceUniqueName();
        String method = request.getMethodName();
        String protocol = getStringAvoidNull(request.getRequestProp(RemotingConstants.HEAD_PROTOCOL));
        String invokeType = request.getInvokeType();
        String callerApp = getStringAvoidNull(request.getRequestProp(RemotingConstants.HEAD_APP_NAME));
        Long elapsedTime = getLongAvoidNull(context.getAttachment(RpcConstants.INTERNAL_KEY_IMPL_ELAPSE));
        // Success requires: a response, no error flag/message, and an app-level
        // result that is not itself a Throwable.
        boolean success = response != null && !response.isError() && response.getErrorMsg() == null
            && (!(response.getAppResponse() instanceof Throwable));
        rpcServerMetricsModel.setApp(app);
        rpcServerMetricsModel.setService(service);
        rpcServerMetricsModel.setMethod(method);
        rpcServerMetricsModel.setProtocol(protocol);
        rpcServerMetricsModel.setInvokeType(invokeType);
        rpcServerMetricsModel.setCallerApp(callerApp);
        rpcServerMetricsModel.setElapsedTime(elapsedTime);
        rpcServerMetricsModel.setSuccess(success);
        return rpcServerMetricsModel;
    }
}
public class Favicon {
    /**
     * Loads the icon from the file system once we get a reference to Vert.x.
     *
     * @param yoke the owning Yoke instance
     * @param mount the mount path for this middleware
     */
    @Override
    public Middleware init(@NotNull final Yoke yoke, @NotNull final String mount) {
        try {
            super.init(yoke, mount);
            // No configured path: fall back to the bundled favicon.ico resource.
            if (path == null) {
                icon = new Icon(Utils.readResourceToBuffer(getClass(), "favicon.ico"));
            } else {
                icon = new Icon(fileSystem().readFileBlocking(path));
            }
        } catch (Exception e) {
            // Initialization failure is fatal for this middleware; rethrow with cause.
            throw new RuntimeException(e);
        }
        return this;
    }
}
public class JSONConverter { /** * serialize a Map ( as Struct )
* @ param map Map to serialize
* @ param sb
* @ param serializeQueryByColumns
* @ param done
* @ throws ConverterException */
private void _serializeMap ( PageContext pc , Set test , Map map , StringBuilder sb , boolean serializeQueryByColumns , Set < Object > done ) throws ConverterException { } } | sb . append ( goIn ( ) ) ; sb . append ( "{" ) ; Iterator it = map . keySet ( ) . iterator ( ) ; boolean doIt = false ; while ( it . hasNext ( ) ) { Object key = it . next ( ) ; if ( doIt ) sb . append ( ',' ) ; doIt = true ; sb . append ( StringUtil . escapeJS ( key . toString ( ) , '"' , charsetEncoder ) ) ; sb . append ( ':' ) ; _serialize ( pc , test , map . get ( key ) , sb , serializeQueryByColumns , done ) ; } sb . append ( '}' ) ; |
public class ClassStats { /** * 参考 sun . jvm . hotspot . oops . ObjectHistogramElement */
private String getInternalName ( Klass k ) { } } | ByteArrayOutputStream bos = new ByteArrayOutputStream ( ) ; klass . printValueOn ( new PrintStream ( bos ) ) ; // ' * ' is used to denote VM internal klasses .
return "* " + bos . toString ( ) ; |
public class Message {
    /**
     * Removes the subject with the given language from the message.
     *
     * @param language the language of the subject which is to be removed
     * @return true if a subject was removed and false if it was not.
     */
    public boolean removeSubject(String language) {
        // Normalize the language tag before comparison.
        language = determineLanguage(language);
        for (Subject subject : subjects) {
            if (language.equals(subject.language)) {
                // Returning immediately makes the remove-during-iteration safe.
                return subjects.remove(subject);
            }
        }
        return false;
    }
}
public class Node {
    /**
     * Frees all the slots for a topology.
     *
     * @param topId the topology to free slots for
     * @param cluster the cluster to update
     */
    public void freeTopology(String topId, Cluster cluster) {
        Set<WorkerSlot> slots = _topIdToUsedSlots.get(topId);
        if (slots == null || slots.isEmpty())
            return;
        for (WorkerSlot ws : slots) {
            cluster.freeSlot(ws);
            // Only return the slot to this node's free pool if the node is alive.
            if (_isAlive) {
                _freeSlots.add(ws);
            }
        }
        _topIdToUsedSlots.remove(topId);
    }
}
public class Document {
    /**
     * Swaps in a new C4Document under the instance lock, retaining the
     * replacement and releasing the document it displaces so the native
     * reference counts stay balanced.
     */
    void replaceC4Document(C4Document c4doc) {
        synchronized (lock) {
            final C4Document oldDoc = this.c4doc;
            this.c4doc = c4doc;
            // No-op if the same document is installed again.
            if (oldDoc != this.c4doc) {
                // Retain the incoming doc before releasing the old one.
                if (this.c4doc != null) {
                    this.c4doc.retain();
                }
                if (oldDoc != null) {
                    oldDoc.release(); // oldDoc should be retained.
                }
            }
        }
    }
}
public class RedisInner {
    /**
     * Export data from the redis cache to blobs in a container.
     *
     * @param resourceGroupName The name of the resource group.
     * @param name The name of the Redis cache.
     * @param parameters Parameters for Redis export operation.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> exportDataAsync(String resourceGroupName, String name, ExportRDBParameters parameters, final ServiceCallback<Void> serviceCallback) {
        // Thin adapter: wraps the service-response observable in a
        // ServiceFuture and forwards completion to the callback.
        return ServiceFuture.fromResponse(exportDataWithServiceResponseAsync(resourceGroupName, name, parameters), serviceCallback);
    }
}
public class HarrisFast {
    /**
     * Tests whether the Harris response at (x, y) is a strict maximum over
     * its 8-neighbourhood.
     *
     * @param hmap Harris response map, indexed as hmap[x][y]
     * @param x first index of the candidate pixel
     * @param y second index of the candidate pixel
     * @return true if hmap[x][y] is strictly greater than all 8 neighbours
     */
    private boolean isSpatialMaxima(float[][] hmap, int x, int y) {
        // Offsets of the 8 neighbours, starting at the top-left corner.
        final int[] dx = new int[] { -1, 0, 1, 1, 1, 0, -1, -1 };
        final int[] dy = new int[] { -1, -1, -1, 0, 1, 1, 1, 0 };
        final double w = hmap[x][y];
        for (int i = 0; i < dx.length; i++) {
            // NOTE(review): no edge clamping — assumes 0 < x, y < bounds - 1;
            // callers must keep the candidate away from the border.
            final double wk = hmap[x + dx[i]][y + dy[i]];
            if (wk >= w) {
                return false; // a neighbour ties or exceeds: not a strict maximum
            }
        }
        return true;
    }
}
public class ProjectManager { /** * DEPRECATED . use membership . delete ( ) instead . */
@ Deprecated public void deleteProjectMembership ( Membership membership ) throws RedmineException { } } | transport . deleteObject ( Membership . class , membership . getId ( ) . toString ( ) ) ; |
public class Utils { /** * assignable index ; */
private static void quickSort ( /* @ non _ null @ */
double [ ] array , /* @ non _ null @ */
int [ ] index , int left , int right ) { } } | if ( left < right ) { int middle = partition ( array , index , left , right ) ; quickSort ( array , index , left , middle ) ; quickSort ( array , index , middle + 1 , right ) ; } |
public class GetLifecyclePolicyPreviewResult { /** * The results of the lifecycle policy preview request .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setPreviewResults ( java . util . Collection ) } or { @ link # withPreviewResults ( java . util . Collection ) } if you want
* to override the existing values .
* @ param previewResults
* The results of the lifecycle policy preview request .
* @ return Returns a reference to this object so that method calls can be chained together . */
public GetLifecyclePolicyPreviewResult withPreviewResults ( LifecyclePolicyPreviewResult ... previewResults ) { } } | if ( this . previewResults == null ) { setPreviewResults ( new java . util . ArrayList < LifecyclePolicyPreviewResult > ( previewResults . length ) ) ; } for ( LifecyclePolicyPreviewResult ele : previewResults ) { this . previewResults . add ( ele ) ; } return this ; |
public class ResourceadapterImpl { /** * Returns all < code > authentication - mechanism < / code > elements
* @ return list of < code > authentication - mechanism < / code > */
public List < AuthenticationMechanism < Resourceadapter < T > > > getAllAuthenticationMechanism ( ) { } } | List < AuthenticationMechanism < Resourceadapter < T > > > list = new ArrayList < AuthenticationMechanism < Resourceadapter < T > > > ( ) ; List < Node > nodeList = childNode . get ( "authentication-mechanism" ) ; for ( Node node : nodeList ) { AuthenticationMechanism < Resourceadapter < T > > type = new AuthenticationMechanismImpl < Resourceadapter < T > > ( this , "authentication-mechanism" , childNode , node ) ; list . add ( type ) ; } return list ; |
public class Year { /** * Obtains an instance of { @ code Year } from a temporal object .
* This obtains a year based on the specified temporal .
* A { @ code TemporalAccessor } represents an arbitrary set of date and time information ,
* which this factory converts to an instance of { @ code Year } .
* The conversion extracts the { @ link ChronoField # YEAR year } field .
* The extraction is only permitted if the temporal object has an ISO
* chronology , or can be converted to a { @ code LocalDate } .
* This method matches the signature of the functional interface { @ link TemporalQuery }
* allowing it to be used as a query via method reference , { @ code Year : : from } .
* @ param temporal the temporal object to convert , not null
* @ return the year , not null
* @ throws DateTimeException if unable to convert to a { @ code Year } */
public static Year from ( TemporalAccessor temporal ) { } } | if ( temporal instanceof Year ) { return ( Year ) temporal ; } Objects . requireNonNull ( temporal , "temporal" ) ; try { if ( IsoChronology . INSTANCE . equals ( Chronology . from ( temporal ) ) == false ) { temporal = LocalDate . from ( temporal ) ; } return of ( temporal . get ( YEAR ) ) ; } catch ( DateTimeException ex ) { throw new DateTimeException ( "Unable to obtain Year from TemporalAccessor: " + temporal + " of type " + temporal . getClass ( ) . getName ( ) , ex ) ; } |
public class InterceptorTypeImpl { /** * Returns all < code > administered - object < / code > elements
* @ return list of < code > administered - object < / code > */
public List < AdministeredObjectType < InterceptorType < T > > > getAllAdministeredObject ( ) { } } | List < AdministeredObjectType < InterceptorType < T > > > list = new ArrayList < AdministeredObjectType < InterceptorType < T > > > ( ) ; List < Node > nodeList = childNode . get ( "administered-object" ) ; for ( Node node : nodeList ) { AdministeredObjectType < InterceptorType < T > > type = new AdministeredObjectTypeImpl < InterceptorType < T > > ( this , "administered-object" , childNode , node ) ; list . add ( type ) ; } return list ; |
public class Predicates { /** * Inverts the sense of the given child predicate . In SQL terms , this
* surrounds the given predicate with " not ( . . . ) " .
* @ param childPredicate
* Predicate whose sense is to be inverted . */
public static Predicate not ( final Predicate childPredicate ) { } } | return new Predicate ( ) { public void init ( AbstractSqlCreator creator ) { childPredicate . init ( creator ) ; } public String toSql ( ) { return "not (" + childPredicate . toSql ( ) + ")" ; } } ; |
public class ServiceLocator { /** * will get the ejb Remote home factory . clients need to cast to the type of
* EJBHome they desire
* @ return the EJB Home corresponding to the homeName */
public EJBHome getRemoteHome ( String jndiHomeName , Class className ) throws ServiceLocatorException { } } | EJBHome home = null ; try { Object objref = ic . lookup ( jndiHomeName ) ; Object obj = PortableRemoteObject . narrow ( objref , className ) ; home = ( EJBHome ) obj ; } catch ( NamingException ne ) { throw new ServiceLocatorException ( ne ) ; } catch ( Exception e ) { throw new ServiceLocatorException ( e ) ; } return home ; |
public class AsyncWork {
    /**
     * Forward the result, error, or cancellation to the given AsyncWork.
     * Bridges this work's TError type to the target's generic Exception
     * error type; each listener callback unblocks the target accordingly.
     */
    public final void listenInlineGenericError(AsyncWork<T, Exception> sp) {
        listenInline(new AsyncWorkListener<T, TError>() {
            @Override
            public void ready(T result) {
                sp.unblockSuccess(result);
            }

            @Override
            public void error(TError error) {
                // TError is forwarded as the target's Exception error.
                sp.unblockError(error);
            }

            @Override
            public void cancelled(CancelException event) {
                sp.unblockCancel(event);
            }
        });
    }
}
public class TypeSignature { /** * Returns internal signature of a parameter . */
private String getParamJVMSignature ( String paramsig ) throws SignatureException { } } | String paramJVMSig = "" ; String componentType = "" ; if ( paramsig != null ) { if ( paramsig . contains ( "[]" ) ) { // Gets array dimension .
int endindex = paramsig . indexOf ( "[]" ) ; componentType = paramsig . substring ( 0 , endindex ) ; String dimensionString = paramsig . substring ( endindex ) ; if ( dimensionString != null ) { while ( dimensionString . contains ( "[]" ) ) { paramJVMSig += "[" ; int beginindex = dimensionString . indexOf ( "]" ) + 1 ; if ( beginindex < dimensionString . length ( ) ) { dimensionString = dimensionString . substring ( beginindex ) ; } else dimensionString = "" ; } } } else componentType = paramsig ; paramJVMSig += getComponentType ( componentType ) ; } return paramJVMSig ; |
public class URLExtensions { /** * Gets the filename from the given url object .
* @ param url
* the url
* @ return the filename
* @ throws UnsupportedEncodingException
* the unsupported encoding exception */
public static String getFilename ( final URL url ) throws UnsupportedEncodingException { } } | if ( isJar ( url ) || isEar ( url ) ) { String fileName = URLDecoder . decode ( url . getFile ( ) , "UTF-8" ) ; fileName = fileName . substring ( 5 , fileName . indexOf ( "!" ) ) ; return fileName ; } return URLDecoder . decode ( url . getFile ( ) , "UTF-8" ) ; |
public class ProtoParser {
    /**
     * ANTLR-generated parser rule (do not hand-edit the logic):
     * com/dyuproject/protostuff/parser/ProtoParser.g:641:1:
     * ignore_block : LEFTCURLY ( ignore_block_body )* RIGHTCURLY ;
     * Consumes a brace-delimited block whose contents are ignored,
     * building an AST subtree for it.
     */
    public final ProtoParser.ignore_block_return ignore_block() throws RecognitionException {
        ProtoParser.ignore_block_return retval = new ProtoParser.ignore_block_return();
        retval.start = input.LT(1);
        Object root_0 = null;
        Token LEFTCURLY166 = null;
        Token RIGHTCURLY168 = null;
        ProtoParser.ignore_block_body_return ignore_block_body167 = null;
        Object LEFTCURLY166_tree = null;
        Object RIGHTCURLY168_tree = null;
        try {
            // com/dyuproject/protostuff/parser/ProtoParser.g:642:5: ( LEFTCURLY ( ignore_block_body )* RIGHTCURLY )
            // com/dyuproject/protostuff/parser/ProtoParser.g:642:9: LEFTCURLY ( ignore_block_body )* RIGHTCURLY
            {
                root_0 = (Object) adaptor.nil();
                // Match the opening brace.
                LEFTCURLY166 = (Token) match(input, LEFTCURLY, FOLLOW_LEFTCURLY_in_ignore_block2623);
                if (state.failed) return retval;
                if (state.backtracking == 0) {
                    LEFTCURLY166_tree = (Object) adaptor.create(LEFTCURLY166);
                    adaptor.addChild(root_0, LEFTCURLY166_tree);
                }
                // com/dyuproject/protostuff/parser/ProtoParser.g:642:19: ( ignore_block_body )*
                // Loop over any token that can start ignore_block_body; any
                // other lookahead (notably RIGHTCURLY/EOF) exits the loop.
                loop38: do {
                    int alt38 = 2;
                    switch (input.LA(1)) {
                        case ASSIGN: case AT: case LEFTCURLY: case LEFTPAREN: case RIGHTPAREN:
                        case LEFTSQUARE: case RIGHTSQUARE: case SEMICOLON: case COMMA: case PLUS:
                        case MINUS: case TO: case TRUE: case FALSE: case PKG: case SYNTAX:
                        case IMPORT: case OPTION: case MESSAGE: case SERVICE: case ENUM:
                        case REQUIRED: case OPTIONAL: case REPEATED: case EXTENSIONS: case EXTEND:
                        case GROUP: case RPC: case RETURNS: case INT32: case INT64: case UINT32:
                        case UINT64: case SINT32: case SINT64: case FIXED32: case FIXED64:
                        case SFIXED32: case SFIXED64: case FLOAT: case DOUBLE: case BOOL:
                        case STRING: case BYTES: case DEFAULT: case MAX: case VOID: case ID:
                        case FULL_ID: case NUMINT: case EXP: case NUMFLOAT: case NUMDOUBLE:
                        case HEX_DIGIT: case HEX: case OCTAL: case COMMENT: case WS:
                        case ESC_SEQ: case STRING_LITERAL: case UNICODE_ESC: case OCTAL_ESC: {
                            alt38 = 1;
                        }
                        break;
                    }
                    switch (alt38) {
                        case 1:
                            // com/dyuproject/protostuff/parser/ProtoParser.g:642:19: ignore_block_body
                            {
                                pushFollow(FOLLOW_ignore_block_body_in_ignore_block2625);
                                ignore_block_body167 = ignore_block_body();
                                state._fsp--;
                                if (state.failed) return retval;
                                if (state.backtracking == 0) adaptor.addChild(root_0, ignore_block_body167.getTree());
                            }
                            break;
                        default:
                            break loop38;
                    }
                } while (true);
                // Match the closing brace.
                RIGHTCURLY168 = (Token) match(input, RIGHTCURLY, FOLLOW_RIGHTCURLY_in_ignore_block2628);
                if (state.failed) return retval;
                if (state.backtracking == 0) {
                    RIGHTCURLY168_tree = (Object) adaptor.create(RIGHTCURLY168);
                    adaptor.addChild(root_0, RIGHTCURLY168_tree);
                }
            }
            retval.stop = input.LT(-1);
            if (state.backtracking == 0) {
                retval.tree = (Object) adaptor.rulePostProcessing(root_0);
                adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
            }
        } catch (RecognitionException re) {
            // Standard ANTLR error recovery: report, resync, emit error node.
            reportError(re);
            recover(input, re);
            retval.tree = (Object) adaptor.errorNode(input, retval.start, input.LT(-1), re);
        } finally {
        }
        return retval;
    }
}
public class RaidNode { /** * check if the file is already raided by high priority codec */
public static boolean raidedByOtherHighPriCodec ( Configuration conf , FileStatus stat , Codec codec ) throws IOException { } } | for ( Codec tcodec : Codec . getCodecs ( ) ) { if ( tcodec . priority > codec . priority ) { if ( stat . isDir ( ) && ! tcodec . isDirRaid ) { // A directory could not be raided by a file level codec .
continue ; } // check if high priority parity file exists .
if ( ParityFilePair . parityExists ( stat , tcodec , conf ) ) { InjectionHandler . processEvent ( InjectionEvent . RAID_ENCODING_SKIP_PATH ) ; return true ; } } } return false ; |
public class SpannerClient { /** * Commits a transaction . The request includes the mutations to be applied to rows in the
* database .
* < p > ` Commit ` might return an ` ABORTED ` error . This can occur at any time ; commonly , the cause is
* conflicts with concurrent transactions . However , it can also happen for a variety of other
* reasons . If ` Commit ` returns ` ABORTED ` , the caller should re - attempt the transaction from the
* beginning , re - using the same session .
* < p > Sample code :
* < pre > < code >
* try ( SpannerClient spannerClient = SpannerClient . create ( ) ) {
* SessionName session = SessionName . of ( " [ PROJECT ] " , " [ INSTANCE ] " , " [ DATABASE ] " , " [ SESSION ] " ) ;
* TransactionOptions singleUseTransaction = TransactionOptions . newBuilder ( ) . build ( ) ;
* List & lt ; Mutation & gt ; mutations = new ArrayList & lt ; & gt ; ( ) ;
* CommitResponse response = spannerClient . commit ( session , singleUseTransaction , mutations ) ;
* < / code > < / pre >
* @ param session Required . The session in which the transaction to be committed is running .
* @ param singleUseTransaction Execute mutations in a temporary transaction . Note that unlike
* commit of a previously - started transaction , commit with a temporary transaction is
* non - idempotent . That is , if the ` CommitRequest ` is sent to Cloud Spanner more than once
* ( for instance , due to retries in the application , or in the transport library ) , it is
* possible that the mutations are executed more than once . If this is undesirable , use
* [ BeginTransaction ] [ google . spanner . v1 . Spanner . BeginTransaction ] and
* [ Commit ] [ google . spanner . v1 . Spanner . Commit ] instead .
* @ param mutations The mutations to be executed when this transaction commits . All mutations are
* applied atomically , in the order they appear in this list .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final CommitResponse commit ( SessionName session , TransactionOptions singleUseTransaction , List < Mutation > mutations ) { } } | CommitRequest request = CommitRequest . newBuilder ( ) . setSession ( session == null ? null : session . toString ( ) ) . setSingleUseTransaction ( singleUseTransaction ) . addAllMutations ( mutations ) . build ( ) ; return commit ( request ) ; |
public class CommandLine { /** * Retrieve the first argument , if any , of this option .
* @ param opt the name of the option
* @ return Value of the argument if option is set , and has an argument ,
* otherwise null . */
public String getOptionValue ( String opt ) { } } | String [ ] values = getOptionValues ( opt ) ; return ( values == null ) ? null : values [ 0 ] ; |
public class ImageSizeImpl {
    /**
     * EMF-generated setter for the VRESOL attribute: stores the new value
     * and fires a SET notification with the old and new values when
     * notification is required.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setVRESOL(Integer newVRESOL) {
        Integer oldVRESOL = vresol;
        vresol = newVRESOL;
        // Notify adapters of the attribute change (standard EMF pattern).
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.IMAGE_SIZE__VRESOL, oldVRESOL, vresol));
    }
}
public class AbstractErrorWebExceptionHandler {
    /**
     * Render a default HTML "Whitelabel Error Page".
     * Useful when no other error view is available in the application.
     *
     * @param responseBody the error response being built
     * @param error the error data as a map
     * @return a Publisher of the {@link ServerResponse}
     */
    protected Mono<ServerResponse> renderDefaultErrorView(ServerResponse.BodyBuilder responseBody, Map<String, Object> error) {
        StringBuilder builder = new StringBuilder();
        // Pull the standard error attributes out of the map.
        Date timestamp = (Date) error.get("timestamp");
        Object message = error.get("message");
        Object trace = error.get("trace");
        Object requestId = error.get("requestId");
        // Assemble the fallback page; dynamic values are HTML-escaped.
        builder.append("<html><body><h1>Whitelabel Error Page</h1>")
            .append("<p>This application has no configured error view, so you are seeing this as a fallback.</p>")
            .append("<div id='created'>").append(timestamp).append("</div>")
            .append("<div>[").append(requestId)
            .append("] There was an unexpected error (type=").append(htmlEscape(error.get("error")))
            .append(", status=").append(htmlEscape(error.get("status"))).append(").</div>");
        // Message and trace sections are optional.
        if (message != null) {
            builder.append("<div>").append(htmlEscape(message)).append("</div>");
        }
        if (trace != null) {
            builder.append("<div style='white-space:pre-wrap;'>").append(htmlEscape(trace)).append("</div>");
        }
        builder.append("</body></html>");
        return responseBody.syncBody(builder.toString());
    }
}
public class Assert { /** * Asserts that two doubles are equal to within a positive delta .
* If they are not , an { @ link AssertionError } is thrown with the given
* message . If the expected value is infinity then the delta value is
* ignored . NaNs are considered equal :
* < code > assertEquals ( Double . NaN , Double . NaN , * ) < / code > passes
* @ param message the identifying message for the { @ link AssertionError } ( < code > null < / code >
* okay )
* @ param expected expected value
* @ param actual the value to check against < code > expected < / code >
* @ param delta the maximum delta between < code > expected < / code > and
* < code > actual < / code > for which both numbers are still
* considered equal . */
public static void assertEquals ( String message , double expected , double actual , double delta ) { } } | if ( doubleIsDifferent ( expected , actual , delta ) ) { failNotEquals ( message , Double . valueOf ( expected ) , Double . valueOf ( actual ) ) ; } |
public class CredentialsRestConnection {
    /**
     * Authenticates against the Hub server by POSTing the credentials to the
     * Spring Security form endpoint. On success the CSRF token header from
     * the response is stored as a common request header for later calls; a
     * non-2xx status is surfaced as an IntegrationRestException.
     */
    @Override
    public void authenticateWithBlackduck() throws IntegrationException {
        final URL securityUrl;
        try {
            securityUrl = new URL(getBaseUrl(), "j_spring_security_check");
        } catch (final MalformedURLException e) {
            throw new IntegrationException("Error constructing the login URL: " + e.getMessage(), e);
        }
        // Only attempt authentication when both credentials are present.
        if (StringUtils.isNotBlank(hubUsername) && StringUtils.isNotBlank(hubPassword)) {
            // Build the form-encoded login body.
            final List<NameValuePair> bodyValues = new ArrayList<>();
            bodyValues.add(new BasicNameValuePair("j_username", hubUsername));
            bodyValues.add(new BasicNameValuePair("j_password", hubPassword));
            final UrlEncodedFormEntity entity = new UrlEncodedFormEntity(bodyValues, Charsets.UTF_8);
            final RequestBuilder requestBuilder = createRequestBuilder(HttpMethod.POST, null);
            requestBuilder.setCharset(Charsets.UTF_8);
            requestBuilder.setUri(securityUrl.toString());
            requestBuilder.setEntity(entity);
            final HttpUriRequest request = requestBuilder.build();
            logRequestHeaders(request);
            // try-with-resources closes the HTTP response in all paths.
            try (final CloseableHttpResponse closeableHttpResponse = getClient().execute(request)) {
                logResponseHeaders(closeableHttpResponse);
                final Response response = new Response(closeableHttpResponse);
                final int statusCode = closeableHttpResponse.getStatusLine().getStatusCode();
                final String statusMessage = closeableHttpResponse.getStatusLine().getReasonPhrase();
                if (statusCode < RestConstants.OK_200 || statusCode >= RestConstants.MULT_CHOICE_300) {
                    // Non-success: include the response content in the exception.
                    final String httpResponseContent = response.getContentString();
                    throw new IntegrationRestException(statusCode, statusMessage, httpResponseContent, String.format("Connection Error: %s %s", statusCode, statusMessage));
                } else {
                    // get the CSRF token
                    final Header csrfToken = closeableHttpResponse.getFirstHeader(RestConstants.X_CSRF_TOKEN);
                    if (csrfToken != null) {
                        addCommonRequestHeader(RestConstants.X_CSRF_TOKEN, csrfToken.getValue());
                    } else {
                        logger.error("No CSRF token found when authenticating");
                    }
                }
            } catch (final IOException e) {
                throw new IntegrationException(e.getMessage(), e);
            }
        }
    }
}
public class BeanRepository { /** * Returns a new { @ code prototype } Bean , created by the given Function . It is possible to pass Parameter
* to the Constructor of the Bean , and determine Dependencies with the { @ link BeanAccessor } . The Method
* { @ link PostConstructible # onPostConstruct ( BeanRepository ) } is executed for every Call of this Method , if
* the Interface is implemented .
* @ param creator The Code to create the new Object
* @ param param1 The 1st Parameter
* @ param < T > The Type of the Bean
* @ param < P1 > The Type of the 1st Parameter
* @ see BeanAccessor
* @ see PostConstructible
* @ return a new created Object */
public < T , P1 > T getPrototypeBean ( final ConstructorWithBeansAnd1Parameter < T , P1 > creator , final P1 param1 ) { } } | final PrototypeProvider provider = new PrototypeProvider ( name , beans -> creator . create ( beans , param1 ) ) ; return provider . getBean ( this , dryRun ) ; |
public class LargeList { /** * Delete values from list .
* @ param values A list of values to delete */
public void remove ( List < Value > values ) { } } | Key [ ] keys = makeSubKeys ( values ) ; List < byte [ ] > digestList = getDigestList ( ) ; // int startIndex = digestList . IndexOf ( subKey . digest ) ;
// int count = values . Count ;
// foreach ( Key key in keys ) {
// client . Delete ( this . policy , key ) ;
// client . Operate ( this . policy , this . key , ListOperation . Remove ( this . binNameString , startIndex , count ) ) ;
for ( Key key : keys ) { client . delete ( this . policy , key ) ; digestList . remove ( key . digest ) ; } client . put ( this . policy , this . key , new Bin ( this . binNameString , digestList ) ) ; |
public class I18nObject { /** * Gets the internationalized value for the supplied message key , using a file as additional information .
* @ param aMessageKey A message key
* @ param aFile Additional details for the message
* @ return The internationalized message */
protected String getI18n ( final String aMessageKey , final File aFile ) { } } | return StringUtils . normalizeWS ( myBundle . get ( aMessageKey , aFile . getAbsolutePath ( ) ) ) ; |
public class MeshArbiter { /** * Convenience wrapper for tests that don ' t care about unknown sites */
Map < Long , Long > reconfigureOnFault ( Set < Long > hsIds , FaultMessage fm ) { } } | return reconfigureOnFault ( hsIds , fm , new HashSet < Long > ( ) ) ; |
public class CouchbaseClient { /** * ( non - Javadoc )
* @ see com . impetus . kundera . client . ClientBase # onPersist ( com . impetus . kundera .
* metadata . model . EntityMetadata , java . lang . Object , java . lang . Object ,
* java . util . List ) */
@ Override protected void onPersist ( EntityMetadata entityMetadata , Object entity , Object id , List < RelationHolder > rlHolders ) { } } | JsonDocument doc = handler . getDocumentFromEntity ( entityMetadata , entity , kunderaMetadata ) ; if ( ! isUpdate ) { bucket . insert ( doc ) ; LOGGER . debug ( "Inserted document with ID : " + doc . id ( ) + " in the " + bucket . name ( ) + " Bucket" ) ; } else { bucket . upsert ( doc ) ; LOGGER . debug ( "Updated document with ID : " + doc . id ( ) + " in the " + bucket . name ( ) + " Bucket" ) ; } |
public class EthiopicDate {
    /**
     * Obtains a {@code EthiopicDate} representing a date in the Ethiopic
     * calendar system from the epoch-day.
     *
     * @param epochDay the epoch day to convert based on 1970-01-01 (ISO)
     * @return the date in Ethiopic calendar system, not null
     * @throws DateTimeException if the epoch-day is out of range
     */
    static EthiopicDate ofEpochDay(final long epochDay) {
        EPOCH_DAY.range().checkValidValue(epochDay, EPOCH_DAY); // validate outer bounds
        long ethiopicED = epochDay + EPOCH_DAY_DIFFERENCE;
        int adjustment = 0;
        if (ethiopicED < 0) {
            // Shift negative day counts forward by a whole number of 4-year
            // cycles (1461 days each) so the division math stays positive,
            // then subtract the same number of years at the end.
            ethiopicED = ethiopicED + (1461L * (1_000_000L / 4));
            adjustment = -1_000_000;
        }
        // Year from day count: 1461 days per 4-year cycle (3 x 365 + 366).
        int prolepticYear = (int) (((ethiopicED * 4) + 1463) / 1461);
        int startYearEpochDay = (prolepticYear - 1) * 365 + (prolepticYear / 4);
        // Zero-based day-of-year within the computed year.
        int doy0 = (int) (ethiopicED - startYearEpochDay);
        // Ethiopic months are a fixed 30 days (the 13th month holds 5-6 days).
        int month = doy0 / 30 + 1;
        int dom = doy0 % 30 + 1;
        return new EthiopicDate(prolepticYear + adjustment, month, dom);
    }
}
public class Consortium {
    /**
     * Returns for given parameter <i>_name</i> the instance of class
     * {@link Consortium}, loading it from the database into the Infinispan
     * cache on a miss. Failed lookups are cached as the {@code NULL}
     * sentinel for 100 seconds to avoid repeated database hits.
     *
     * @param _name name to search in the cache
     * @return instance of class {@link Consortium}, or null if not found
     * @throws CacheReloadException on error
     * @see #getCache
     */
    public static Consortium get(final String _name) throws CacheReloadException {
        final Cache<String, Consortium> cache = InfinispanCache.get().<String, Consortium>getCache(Consortium.IDCACHE);
        // On a cache miss, try the DB; if the DB also misses, cache the NULL
        // sentinel with a 100 second lifespan so the miss is remembered.
        if (!cache.containsKey(_name) && !Consortium.getConsortiumFromDB(Consortium.SQL_NAME, _name)) {
            cache.put(_name, Consortium.NULL, 100, TimeUnit.SECONDS);
        }
        final Consortium ret = cache.get(_name);
        // Translate the sentinel back into null for callers.
        return ret.equals(Consortium.NULL) ? null : ret;
    }
}
public class QueueListenerFactory { /** * Shut down the { @ link QueueListenerFactory } and close all open connections . Shared clients are not shut down by this
* method . The instance should be discarded after calling shutdown .
* @ param quietPeriod the quiet period as described in the documentation
* @ param timeout the maximum amount of time to wait until the executor is shutdown regardless if a task was submitted
* during the quiet period
* @ param timeUnit the unit of { @ code quietPeriod } and { @ code timeout } */
public void shutdown ( long quietPeriod , long timeout , TimeUnit timeUnit ) { } } | // disable all resources to benefit from concurrent shutdowns
for ( QueueListener < K , V > resource : resources ) { resource . disable ( ) ; } for ( QueueListener < K , V > resource : resources ) { resource . close ( timeout , timeUnit ) ; } resources . clear ( ) ; if ( ! sharedClient ) { disqueClient . shutdown ( quietPeriod , timeout , timeUnit ) ; } |
public class ParsedPath { /** * Create a path that is expected to represent a path to any entry .
* This is different from { @ link # toDirectory ( String ) } in that if the path ends with a delimiter ' / ' ,
* it is < b > not < / b > considered the same as if it didn ' t .
* i . e . path / to would be a path with 2 elements : ' path ' and ' to ' , but
* but path / to / would be a path with 3 elements : ' path ' , ' to ' and an empty element ' ' .
* @ param rawPath Path to parse .
* @ return A { @ link ParsedPath } out of the given path .
* @ throws IllegalArgumentException If any element along the path except the last one is empty . */
public static ParsedPath toEntry ( String rawPath ) { } } | final String path = rawPath . trim ( ) ; final boolean startsWithDelimiter = path . startsWith ( "/" ) ; // Keep the trailing delimiter .
// This allows us to treat paths that end with a delimiter differently from paths that don ' t .
// i . e . path / to would be a path with 2 elements : ' path ' and ' to ' , but
// but path / to / would be a path with 3 elements : ' path ' , ' to ' and an empty element ' ' .
final List < String > pathElements = splitPath ( path , true ) ; return new ParsedPath ( startsWithDelimiter , pathElements ) ; |
public class PolicyTaskFutureImpl { /** * Invoked to indicate the task was successfully submitted .
* @ param runOnSubmitter true if accepted to run immediately on the submitter ' s thread . False if accepted to the queue . */
@ Trivial final void accept ( boolean runOnSubmitter ) { } } | long time ; nsAcceptEnd = time = System . nanoTime ( ) ; if ( runOnSubmitter ) nsQueueEnd = time ; state . setSubmitted ( ) ; |
public class ApiOvhDbaaslogs { /** * Returns details of specified archive
* REST : GET / dbaas / logs / { serviceName } / output / graylog / stream / { streamId } / archive / { archiveId }
* @ param serviceName [ required ] Service name
* @ param streamId [ required ] Stream ID
* @ param archiveId [ required ] Archive ID */
public OvhArchive serviceName_output_graylog_stream_streamId_archive_archiveId_GET ( String serviceName , String streamId , String archiveId ) throws IOException { } } | String qPath = "/dbaas/logs/{serviceName}/output/graylog/stream/{streamId}/archive/{archiveId}" ; StringBuilder sb = path ( qPath , serviceName , streamId , archiveId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhArchive . class ) ; |
public class TableRef { /** * Adds a new item to the table .
* < pre >
* StorageRef storage = new StorageRef ( " your _ app _ key " , " your _ token " ) ;
* TableRef tableRef = storage . table ( " your _ table " ) ;
* LinkedHashMap & ltString , ItemAttribute > lhm = new LinkedHashMap & ltString , ItemAttribute > ( ) ;
* / / Put elements to the map
* lhm . put ( " your _ primary _ key " , new ItemAttribute ( " new _ primary _ key _ value " ) ) ;
* lhm . put ( " your _ secondary _ key " , new ItemAttribute ( " new _ secondary _ key _ value " ) ) ;
* lhm . put ( " itemProperty " , new ItemAttribute ( " new _ itemproperty _ value " ) ) ;
* tableRef . push ( lhm , new OnItemSnapshot ( ) {
* & # 064 ; Override
* public void run ( ItemSnapshot itemSnapshot ) {
* if ( itemSnapshot ! = null ) {
* Log . d ( " TableRef " , " Item inserted : " + itemSnapshot . val ( ) ) ;
* } , new OnError ( ) {
* & # 064 ; Override
* public void run ( Integer integer , String errorMessage ) {
* Log . e ( " TableRef " , " Error inserting item : " + errorMessage ) ;
* < / pre >
* @ param item
* The item to add
* @ param onItemSnapshot
* The callback to run once the insertion is done .
* @ param onError
* The callback to call if an exception occurred
* @ return Current table reference */
public TableRef push ( LinkedHashMap < String , ItemAttribute > item , OnItemSnapshot onItemSnapshot , OnError onError ) { } } | PostBodyBuilder pbb = new PostBodyBuilder ( context ) ; pbb . addObject ( "table" , this . name ) ; pbb . addObject ( "item" , item ) ; Rest r = new Rest ( context , RestType . PUTITEM , pbb , this ) ; r . onError = onError ; r . onItemSnapshot = onItemSnapshot ; context . processRest ( r ) ; return this ; |
public class AWSCloudHSMClient { /** * This is documentation for < b > AWS CloudHSM Classic < / b > . For more information , see < a
* href = " http : / / aws . amazon . com / cloudhsm / faqs - classic / " > AWS CloudHSM Classic FAQs < / a > , the < a
* href = " http : / / docs . aws . amazon . com / cloudhsm / classic / userguide / " > AWS CloudHSM Classic User Guide < / a > , and the < a
* href = " http : / / docs . aws . amazon . com / cloudhsm / classic / APIReference / " > AWS CloudHSM Classic API Reference < / a > .
* < b > For information about the current version of AWS CloudHSM < / b > , see < a
* href = " http : / / aws . amazon . com / cloudhsm / " > AWS CloudHSM < / a > , the < a
* href = " http : / / docs . aws . amazon . com / cloudhsm / latest / userguide / " > AWS CloudHSM User Guide < / a > , and the < a
* href = " http : / / docs . aws . amazon . com / cloudhsm / latest / APIReference / " > AWS CloudHSM API Reference < / a > .
* Creates an HSM client .
* @ param createLunaClientRequest
* Contains the inputs for the < a > CreateLunaClient < / a > action .
* @ return Result of the CreateLunaClient operation returned by the service .
* @ throws CloudHsmServiceException
* Indicates that an exception occurred in the AWS CloudHSM service .
* @ throws CloudHsmInternalException
* Indicates that an internal error occurred .
* @ throws InvalidRequestException
* Indicates that one or more of the request parameters are not valid .
* @ sample AWSCloudHSM . CreateLunaClient
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cloudhsm - 2014-05-30 / CreateLunaClient " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public CreateLunaClientResult createLunaClient ( CreateLunaClientRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeCreateLunaClient ( request ) ; |
public class Node { /** * Creates an absolute link dest pointing to this . The signature of this method resembles the copy method .
* @ param dest link to be created
* @ return dest ; */
public T link ( T dest ) throws LinkException { } } | if ( ! getClass ( ) . equals ( dest . getClass ( ) ) ) { throw new IllegalArgumentException ( this . getClass ( ) + " vs " + dest . getClass ( ) ) ; } try { checkExists ( ) ; } catch ( IOException e ) { throw new LinkException ( this , e ) ; } // TODO : getRoot ( ) for ssh root . . .
dest . mklink ( Filesystem . SEPARATOR_STRING + this . getPath ( ) ) ; return dest ; |
public class HookParams { /** * Returns an unmodifiable multimap of the params , where the
* key is the param type and the value is the actual instance */
public ListMultimap < Class < ? > , Object > getParamsForType ( ) { } } | ArrayListMultimap < Class < ? > , Object > retVal = ArrayListMultimap . create ( ) ; myParams . entries ( ) . forEach ( entry -> retVal . put ( entry . getKey ( ) , unwrapValue ( entry . getValue ( ) ) ) ) ; return Multimaps . unmodifiableListMultimap ( retVal ) ; |
public class AdHocCommandManager { /** * Creates a new instance of a command to be used by a new execution request
* @ param commandNode the command node that identifies it .
* @ param sessionID the session id of this execution .
* @ return the command instance to execute .
* @ throws XMPPErrorException if there is problem creating the new instance .
* @ throws SecurityException
* @ throws NoSuchMethodException
* @ throws InvocationTargetException
* @ throws IllegalArgumentException
* @ throws IllegalAccessException
* @ throws InstantiationException */
private LocalCommand newInstanceOfCmd ( String commandNode , String sessionID ) throws XMPPErrorException , InstantiationException , IllegalAccessException , IllegalArgumentException , InvocationTargetException , NoSuchMethodException , SecurityException { } } | AdHocCommandInfo commandInfo = commands . get ( commandNode ) ; LocalCommand command = commandInfo . getCommandInstance ( ) ; command . setSessionID ( sessionID ) ; command . setName ( commandInfo . getName ( ) ) ; command . setNode ( commandInfo . getNode ( ) ) ; return command ; |
public class RegexMatcher { /** * Add expression .
* @ param expr
* @ param attach
* @ param options
* @ return */
public RegexMatcher addExpression ( String expr , T attach , Option ... options ) { } } | if ( nfa == null ) { nfa = parser . createNFA ( nfaScope , expr , attach , options ) ; } else { NFA < T > nfa2 = parser . createNFA ( nfaScope , expr , attach , options ) ; nfa = new NFA < > ( nfaScope , nfa , nfa2 ) ; } return this ; |
public class JsonSerializationContext {
    /**
     * Traces an error and returns a corresponding exception for the caller to throw.
     *
     * @param value the current value being serialized (currently not included in the
     *              log record or the exception message)
     * @param message the error message
     * @return a {@link JsonSerializationException} carrying the given message
     */
    public JsonSerializationException traceError(Object value, String message) {
        // Log first, then hand the caller an exception built from the same message.
        getLogger().log(Level.SEVERE, message);
        return new JsonSerializationException(message);
    }
}
public class DaoUnit { /** * 打印sql语句 , 它不会将sql执行 , 只是打印sql语句 。
* 仅供内部测试使用
* @ param daoUnitClass unit class
* @ param map parameter map */
public static void logSql ( Class daoUnitClass , Map < String , Object > map ) { } } | XianConnection connection = PoolFactory . getPool ( ) . getMasterDatasource ( ) . getConnection ( ) . blockingGet ( ) ; DaoUnit daoUnit ; try { daoUnit = ( DaoUnit ) daoUnitClass . newInstance ( ) ; for ( SqlAction action : daoUnit . getActions ( ) ) { ( ( AbstractSqlAction ) action ) . setConnection ( connection ) ; ( ( AbstractSqlAction ) action ) . setMap ( map ) ; action . logSql ( map ) ; } } catch ( InstantiationException | IllegalAccessException e ) { e . printStackTrace ( ) ; } PoolFactory . getPool ( ) . destroyPoolIfNot ( ) ; |
public class MapType { /** * Create a map block directly without per element validations .
* Internal use by com . facebook . presto . spi . Block only . */
public static Block createMapBlockInternal ( TypeManager typeManager , Type keyType , int startOffset , int positionCount , Optional < boolean [ ] > mapIsNull , int [ ] offsets , Block keyBlock , Block valueBlock , HashTables hashTables ) { } } | // TypeManager caches types . Therefore , it is important that we go through it instead of coming up with the MethodHandles directly .
// BIGINT is chosen arbitrarily here . Any type will do .
MapType mapType = ( MapType ) typeManager . getType ( new TypeSignature ( StandardTypes . MAP , TypeSignatureParameter . of ( keyType . getTypeSignature ( ) ) , TypeSignatureParameter . of ( BIGINT . getTypeSignature ( ) ) ) ) ; return MapBlock . createMapBlockInternal ( startOffset , positionCount , mapIsNull , offsets , keyBlock , valueBlock , hashTables , keyType , mapType . keyBlockNativeEquals , mapType . keyNativeHashCode , mapType . keyBlockHashCode ) ; |
public class EMatrixUtils { /** * Returns the standard deviations of columns .
* @ param matrix The matrix of which the standard deviations of columns to be computed
* @ return A double array of column standard deviations . */
public static double [ ] columnStdDevs ( RealMatrix matrix ) { } } | double [ ] retval = new double [ matrix . getColumnDimension ( ) ] ; for ( int i = 0 ; i < retval . length ; i ++ ) { retval [ i ] = new DescriptiveStatistics ( matrix . getColumn ( i ) ) . getStandardDeviation ( ) ; } return retval ; |
public class JettySetting { /** * 创建HttpConfiguration
* @ return HttpConfiguration */
public static HttpConfiguration createHttpConfiguration ( ) { } } | HttpConfiguration httpConfig = new HttpConfiguration ( ) ; httpConfig . setSecurePort ( setting . getInt ( "secure-port" , "http" , 8443 ) ) ; httpConfig . setOutputBufferSize ( setting . getInt ( "output-buffersize" , "http" , 32768 ) ) ; httpConfig . setRequestHeaderSize ( setting . getInt ( "request-headersize" , "http" , 8192 ) ) ; httpConfig . setResponseHeaderSize ( setting . getInt ( "response-headersize" , "http" , 8192 ) ) ; httpConfig . setSendServerVersion ( true ) ; httpConfig . setSendDateHeader ( false ) ; return httpConfig ; |
public class AbstractDateCellConverterFactory { /** * 文字列をパースして対応するオブジェクトに変換する
* @ param formatter フォーマッタ
* @ param text パース対象の文字列
* @ return パースした結果
* @ throws ParseException */
protected T parseString ( final DateFormat formatter , final String text ) throws ParseException { } } | Date date = formatter . parse ( text ) ; return convertTypeValue ( date ) ; |
public class Endpoint { /** * Convenience factory method to create a Endpoint object from a map of parameters
* @ param client the bandwidth client configuration .
* @ param domainId the domain id .
* @ param params the request parameters .
* @ return the created endpoint .
* @ throws AppPlatformException API Exception
* @ throws ParseException Error parsing data
* @ throws Exception error */
public static Endpoint create ( final BandwidthClient client , final String domainId , final Map < String , Object > params ) throws AppPlatformException , ParseException , Exception { } } | assert ( client != null && params != null ) ; final String endpointsUri = String . format ( client . getUserResourceUri ( BandwidthConstants . ENDPOINTS_URI_PATH ) , domainId ) ; final RestResponse response = client . post ( endpointsUri , params ) ; final JSONObject callObj = toJSONObject ( client . get ( response . getLocation ( ) , null ) ) ; return new Endpoint ( client , callObj ) ; |
public class ApplicationsInner {
    /**
     * Lists all of the applications for the HDInsight cluster (single page).
     *
     * @param resourceGroupName the name of the resource group
     * @param clusterName the name of the cluster
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;ApplicationInner&gt; object wrapped in {@link ServiceResponse} if successful
     */
    public Observable<ServiceResponse<Page<ApplicationInner>>> listByClusterSinglePageAsync(final String resourceGroupName, final String clusterName) {
        // Fail fast on every required parameter before issuing the request; the check
        // order (subscription, group, cluster, api version) determines which error wins.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (clusterName == null) {
            throw new IllegalArgumentException("Parameter clusterName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.listByCluster(this.client.subscriptionId(), resourceGroupName, clusterName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<ApplicationInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<ApplicationInner>>> call(Response<ResponseBody> response) {
                    try {
                        // Deserialize the raw HTTP response into one page of results.
                        ServiceResponse<PageImpl<ApplicationInner>> result = listByClusterDelegate(response);
                        return Observable.just(new ServiceResponse<Page<ApplicationInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        // Surface deserialization failures through the Observable, not the stack.
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class AmazonEC2Client { /** * Modifies the ID format for the specified resource on a per - region basis . You can specify that resources should
* receive longer IDs ( 17 - character IDs ) when they are created .
* This request can only be used to modify longer ID settings for resource types that are within the opt - in period .
* Resources currently in their opt - in period include : < code > bundle < / code > | < code > conversion - task < / code > |
* < code > customer - gateway < / code > | < code > dhcp - options < / code > | < code > elastic - ip - allocation < / code > |
* < code > elastic - ip - association < / code > | < code > export - task < / code > | < code > flow - log < / code > | < code > image < / code > |
* < code > import - task < / code > | < code > internet - gateway < / code > | < code > network - acl < / code > |
* < code > network - acl - association < / code > | < code > network - interface < / code > | < code > network - interface - attachment < / code >
* | < code > prefix - list < / code > | < code > route - table < / code > | < code > route - table - association < / code > |
* < code > security - group < / code > | < code > subnet < / code > | < code > subnet - cidr - block - association < / code > | < code > vpc < / code >
* | < code > vpc - cidr - block - association < / code > | < code > vpc - endpoint < / code > | < code > vpc - peering - connection < / code > |
* < code > vpn - connection < / code > | < code > vpn - gateway < / code > .
* This setting applies to the IAM user who makes the request ; it does not apply to the entire AWS account . By
* default , an IAM user defaults to the same settings as the root user . If you ' re using this action as the root
* user , then these settings apply to the entire account , unless an IAM user explicitly overrides these settings for
* themselves . For more information , see < a
* href = " https : / / docs . aws . amazon . com / AWSEC2 / latest / UserGuide / resource - ids . html " > Resource IDs < / a > in the < i > Amazon
* Elastic Compute Cloud User Guide < / i > .
* Resources created with longer IDs are visible to all IAM roles and users , regardless of these settings and
* provided that they have permission to use the relevant < code > Describe < / code > command for the resource type .
* @ param modifyIdFormatRequest
* @ return Result of the ModifyIdFormat operation returned by the service .
* @ sample AmazonEC2 . ModifyIdFormat
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / ModifyIdFormat " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public ModifyIdFormatResult modifyIdFormat ( ModifyIdFormatRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeModifyIdFormat ( request ) ; |
public class MeterRegistry { /** * Only used by { @ link Timer # builder ( String ) } .
* @ param id The identifier for this timer .
* @ param distributionStatisticConfig Configuration that governs how distribution statistics are computed .
* @ return A new or existing timer . */
Timer timer ( Meter . Id id , DistributionStatisticConfig distributionStatisticConfig , PauseDetector pauseDetectorOverride ) { } } | return registerMeterIfNecessary ( Timer . class , id , distributionStatisticConfig , ( id2 , filteredConfig ) -> { Meter . Id withUnit = id2 . withBaseUnit ( getBaseTimeUnitStr ( ) ) ; return newTimer ( withUnit , filteredConfig . merge ( defaultHistogramConfig ( ) ) , pauseDetectorOverride ) ; } , NoopTimer :: new ) ; |
public class NodeSetDTM { /** * Deletes the component at the specified index . Each component in
* this vector with an index greater or equal to the specified
* index is shifted downward to have an index one smaller than
* the value it had previously .
* @ param i The index of the node to be removed .
* @ throws RuntimeException thrown if this NodeSetDTM is not of
* a mutable type . */
public void removeElementAt ( int i ) { } } | if ( ! m_mutable ) throw new RuntimeException ( XSLMessages . createXPATHMessage ( XPATHErrorResources . ER_NODESETDTM_NOT_MUTABLE , null ) ) ; // " This NodeSetDTM is not mutable ! " ) ;
super . removeElementAt ( i ) ; |
public class Locale { /** * Returns a < code > Locale < / code > constructed from the given
* < code > language < / code > , < code > country < / code > and
* < code > variant < / code > . If the same < code > Locale < / code > instance
* is available in the cache , then that instance is
* returned . Otherwise , a new < code > Locale < / code > instance is
* created and cached .
* @ param language lowercase 2 to 8 language code .
* @ param country uppercase two - letter ISO - 3166 code and numric - 3 UN M . 49 area code .
* @ param variant vendor and browser specific code . See class description .
* @ return the < code > Locale < / code > instance requested
* @ exception NullPointerException if any argument is null . */
static Locale getInstance ( String language , String country , String variant ) { } } | return getInstance ( language , "" , country , variant , null ) ; |
public class EventServiceImpl { /** * Collects all non - local registrations and returns them as a { @ link OnJoinRegistrationOperation } .
* @ return the on join operation containing all non - local registrations */
private OnJoinRegistrationOperation getOnJoinRegistrationOperation ( ) { } } | Collection < Registration > registrations = new LinkedList < Registration > ( ) ; for ( EventServiceSegment segment : segments . values ( ) ) { segment . collectRemoteRegistrations ( registrations ) ; } return registrations . isEmpty ( ) ? null : new OnJoinRegistrationOperation ( registrations ) ; |
public class CommonOps_DSCC {
    /**
     * Computes the inverse permutation vector: for every index i,
     * {@code inverse[original[i]] == i}.
     *
     * @param original the original permutation vector
     * @param inverse output array that receives the inverse permutation
     * @param length number of entries to process
     */
    public static void permutationInverse(int[] original, int[] inverse, int length) {
        for (int idx = 0; idx < length; idx++) {
            inverse[original[idx]] = idx;
        }
    }
}
public class AbstractEventSerializer { /** * Indicates whether the CloudTrail log has more events to read .
* @ return < code > true < / code > if the log contains more events ; < code > false < / code > otherwise .
* @ throws IOException if the log could not be opened or accessed . */
public boolean hasNextEvent ( ) throws IOException { } } | /* In Fasterxml parser , hasNextEvent will consume next token . So do not call it multiple times . */
JsonToken nextToken = jsonParser . nextToken ( ) ; return nextToken == JsonToken . START_OBJECT || nextToken == JsonToken . START_ARRAY ; |
public class MercatorProjection {
    /**
     * Converts a latitude coordinate (in degrees) to a pixel Y coordinate at a certain
     * map size, clamped to the range [0, mapSize].
     *
     * @param latitude the latitude coordinate that should be converted
     * @param mapSize precomputed size of map
     * @return the pixel Y coordinate of the latitude value
     */
    public static double latitudeToPixelY(double latitude, long mapSize) {
        // FIXME improve this formula so that it works correctly without the clipping
        final double sinLatitude = Math.sin(latitude * (Math.PI / 180));
        final double rawPixelY = (0.5 - Math.log((1 + sinLatitude) / (1 - sinLatitude)) / (4 * Math.PI)) * mapSize;
        // Clamp to the valid pixel range of the map.
        return Math.min(Math.max(0, rawPixelY), mapSize);
    }
}
public class DbLoadAction {
    /**
     * Executes DDL statements. The processing logic is intentionally simple: serial,
     * one statement at a time.
     *
     * @param context the load context of the current pipeline
     * @param eventDatas the DDL events to execute
     */
    private void doDdl(DbLoadContext context, List<EventData> eventDatas) {
        for (final EventData data : eventDatas) {
            // Resolve the dialect for the table's source database.
            DataMedia dataMedia = ConfigHelper.findDataMedia(context.getPipeline(), data.getTableId());
            final DbDialect dbDialect = dbDialectFactory.getDbDialect(context.getIdentity().getPipelineId(), (DbMediaSource) dataMedia.getSource());
            Boolean skipDdlException = context.getPipeline().getParameters().getSkipDdlException();
            try {
                Boolean result = dbDialect.getJdbcTemplate().execute(new StatementCallback<Boolean>() {
                    public Boolean doInStatement(Statement stmt) throws SQLException, DataAccessException {
                        Boolean result = true;
                        if (dbDialect instanceof MysqlDialect && StringUtils.isNotEmpty(data.getDdlSchemaName())) {
                            // For MySQL, switch to the schema the DDL ran in on the source side.
                            // result &= stmt.execute("use " +
                            // data.getDdlSchemaName());
                            // Backquote the schema name so reserved words (e.g. "Order") still work.
                            result &= stmt.execute("use `" + data.getDdlSchemaName() + "`");
                        }
                        result &= stmt.execute(data.getSql());
                        return result;
                    }
                });
                if (result) {
                    context.getProcessedDatas().add(data); // record as a successfully processed statement
                } else {
                    context.getFailedDatas().add(data);
                }
            } catch (Throwable e) {
                if (skipDdlException) {
                    // do skip
                    logger.warn("skip exception for ddl : {} , caused by {}", data, ExceptionUtils.getFullStackTrace(e));
                } else {
                    throw new LoadException(e);
                }
            }
        }
    }
}
public class Database {
    /**
     * Alias for {@code findUniqueOrNull(rowMapper, SqlQuery.query(sql, args))}.
     *
     * @param rowMapper maps the result row to the returned value
     * @param sql the SQL statement, with {@code ?} placeholders bound from {@code args}
     * @return the unique matching row mapped by {@code rowMapper}, or {@code null}
     */
    public @Nullable <T> T findUniqueOrNull(@NotNull RowMapper<T> rowMapper, @NotNull @SQL String sql, Object... args) {
        return findUniqueOrNull(rowMapper, SqlQuery.query(sql, args));
    }
}
public class AsynchronousRequest {
    /**
     * Asynchronously fetches all glider ids. For more info on the gliders API go
     * <a href="https://wiki.guildwars2.com/wiki/API:2/gliders">here</a>.<br/>
     * Gives the user access to {@link Callback#onResponse(Call, Response)} and
     * {@link Callback#onFailure(Call, Throwable)} for custom interactions.
     *
     * @param callback callback that is going to be used for {@link Call#enqueue(Callback)}
     * @throws NullPointerException if the given {@link Callback} is null
     * @see Glider glider info
     */
    public void getAllGliderID(Callback<List<Integer>> callback) throws NullPointerException {
        gw2API.getAllGliderIDs().enqueue(callback);
    }
}
public class CounterStorage { /** * Enregistre le counter .
* @ return Taille sérialisée non compressée du counter ( estimation pessimiste de l ' occupation mémoire )
* @ throws IOException Exception d ' entrée / sortie */
int writeToFile ( ) throws IOException { } } | if ( storageDisabled ) { return - 1 ; } final File file = getFile ( ) ; if ( counter . getRequestsCount ( ) == 0 && counter . getErrorsCount ( ) == 0 && ! file . exists ( ) ) { // s ' il n ' y a pas de requête , inutile d ' écrire des fichiers de compteurs vides
// ( par exemple pour le compteur ejb s ' il n ' y a pas d ' ejb )
return - 1 ; } final File directory = file . getParentFile ( ) ; if ( ! directory . mkdirs ( ) && ! directory . exists ( ) ) { throw new IOException ( "JavaMelody directory can't be created: " + directory . getPath ( ) ) ; } return writeToFile ( counter , file ) ; |
public class JsonApiRequestProcessor { /** * Determines whether the supplied HTTP request is considered a JSON - API request .
* @ param requestContext The HTTP request
* @ param acceptPlainJson Whether a plain JSON request should also be considered a JSON - API request
* @ return < code > true < / code > if it is a JSON - API request ; < code > false < / code > otherwise
* @ since 2.4 */
@ SuppressWarnings ( "UnnecessaryLocalVariable" ) public static boolean isJsonApiRequest ( HttpRequestContext requestContext , boolean acceptPlainJson ) { } } | String method = requestContext . getMethod ( ) . toUpperCase ( ) ; boolean isPatch = method . equals ( HttpMethod . PATCH . toString ( ) ) ; boolean isPost = method . equals ( HttpMethod . POST . toString ( ) ) ; if ( isPatch || isPost ) { String contentType = requestContext . getRequestHeader ( HttpHeaders . HTTP_CONTENT_TYPE ) ; if ( contentType == null || ! contentType . startsWith ( HttpHeaders . JSONAPI_CONTENT_TYPE ) ) { LOGGER . warn ( "not a JSON-API request due to content type {}" , contentType ) ; return false ; } } // short - circuit each of the possible Accept MIME type checks , so that we don ' t keep comparing after we have already
// found a match . Intentionally kept as separate statements ( instead of a big , chained | | ) to ease debugging / maintenance .
boolean acceptsJsonApi = requestContext . accepts ( HttpHeaders . JSONAPI_CONTENT_TYPE ) ; boolean acceptsAny = acceptsJsonApi || requestContext . acceptsAny ( ) ; boolean acceptsPlainJson = acceptsAny || ( acceptPlainJson && requestContext . accepts ( "application/json" ) ) ; LOGGER . debug ( "accepting request as JSON-API: {}" , acceptPlainJson ) ; return acceptsPlainJson ; |
public class WordUtils {
    /**
     * <p>Wraps a single line of text, identifying breakable positions by the
     * {@code wrapOn} regular expression.</p>
     *
     * <p>Leading spaces on a new line are stripped. Trailing spaces are not stripped.
     * A null input returns null; an empty input returns "".</p>
     *
     * <p>Examples (wrapLength 20, newLineStr "\n"):</p>
     * <ul>
     * <li>"Here is one line of text that is going to be wrapped after 20 columns."
     *     &rarr; "Here is one line of\ntext that is going\nto be wrapped after\n20 columns."</li>
     * <li>With wrapLongWords=false, "Click here to jump to the commons website - http://commons.apache.org"
     *     &rarr; "Click here to jump\nto the commons\nwebsite -\nhttp://commons.apache.org"
     *     (the long URL is left unbroken)</li>
     * <li>With wrapLongWords=true the same URL is split:
     *     "...website -\nhttp://commons.apach\ne.org"</li>
     * <li>With wrapOn "/", "flammable/inflammable" &rarr; "flammable\ninflammable"</li>
     * </ul>
     *
     * @param str the String to be word wrapped, may be null
     * @param wrapLength the column to wrap the words at, less than 1 is treated as 1
     * @param newLineStr the string to insert for a new line,
     *        <code>null</code> uses the system property line separator
     * @param wrapLongWords true if long words (such as URLs) should be wrapped
     * @param wrapOn regex expression to be used as breakable characters,
     *        if a blank string is provided a space character will be used
     * @return a line with newlines inserted, <code>null</code> if null input
     */
    public static String wrap(final String str, int wrapLength, String newLineStr, final boolean wrapLongWords, String wrapOn) {
        if (str == null) {
            return null;
        }
        // Normalize the arguments: default newline, minimum wrap length of 1,
        // and a single space as the default break pattern.
        if (newLineStr == null) {
            newLineStr = "\n";
        }
        if (wrapLength < 1) {
            wrapLength = 1;
        }
        if (StringUtils.isBlank(wrapOn)) {
            wrapOn = " ";
        }
        final RegExp patternToWrapOn = RegExp.compile(wrapOn);
        final int inputLineLength = str.length();
        int offset = 0;
        final StringBuilder wrappedLine = new StringBuilder(inputLineLength + 32);
        while (offset < inputLineLength) {
            int spaceToWrapAt = -1;
            // Look for a break position within the next wrapLength+1 characters.
            MatchResult matcher = patternToWrapOn.exec(str.substring(offset, Math.min(offset + wrapLength + 1, inputLineLength)));
            if (matcher != null && matcher.getGroupCount() > 0) {
                final String firstMatch = matcher.getGroup(0);
                final int start = StringUtils.indexOf(matcher.getInput(), firstMatch);
                if (matcher.getInput().startsWith(firstMatch)) {
                    // The window starts with a break match: skip it (strips leading
                    // separators at the start of a new line).
                    final int end = start + StringUtils.length(firstMatch);
                    offset += end;
                    continue;
                } else {
                    spaceToWrapAt = start;
                }
            }
            // only last line without leading spaces is left
            if (inputLineLength - offset <= wrapLength) {
                break;
            }
            if (matcher != null && matcher.getGroupCount() > 0) {
                // Prefer the last break position inside the window, translated to an
                // absolute offset in str.
                final String lastMatch = matcher.getGroup(matcher.getGroupCount() - 1);
                final int start = StringUtils.lastIndexOf(matcher.getInput(), lastMatch);
                spaceToWrapAt = start + offset;
            }
            if (spaceToWrapAt >= offset) {
                // normal case: break at the separator and continue after it
                wrappedLine.append(str, offset, spaceToWrapAt);
                wrappedLine.append(newLineStr);
                offset = spaceToWrapAt + 1;
            } else {
                // really long word or URL with no break position inside the window
                if (wrapLongWords) {
                    // wrap really long word one line at a time
                    wrappedLine.append(str, offset, wrapLength + offset);
                    wrappedLine.append(newLineStr);
                    offset += wrapLength;
                } else {
                    // do not wrap really long word, just extend beyond limit to the
                    // next break position (if any)
                    matcher = patternToWrapOn.exec(str.substring(offset + wrapLength));
                    if (matcher != null && matcher.getGroupCount() > 0) {
                        final String firstMatch = matcher.getGroup(0);
                        final int start = StringUtils.indexOf(matcher.getInput(), firstMatch);
                        spaceToWrapAt = start + offset + wrapLength;
                    }
                    if (spaceToWrapAt >= 0) {
                        wrappedLine.append(str, offset, spaceToWrapAt);
                        wrappedLine.append(newLineStr);
                        offset = spaceToWrapAt + 1;
                    } else {
                        // No break position at all: emit the rest unbroken.
                        wrappedLine.append(str, offset, str.length());
                        offset = inputLineLength;
                    }
                }
            }
        }
        // Whatever is left in line is short enough to just pass through
        wrappedLine.append(str, offset, str.length());
        return wrappedLine.toString();
    }
}
public class JvmInnerTypeReferenceImpl {
    /**
     * Sets the 'outer' containment reference, managing the EMF inverse-remove/add
     * notification chain for the old and new values.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setOuter(JvmParameterizedTypeReference newOuter) {
        if (newOuter != outer) {
            NotificationChain msgs = null;
            // Detach the previous outer reference from this container.
            if (outer != null)
                msgs = ((InternalEObject) outer).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - TypesPackage.JVM_INNER_TYPE_REFERENCE__OUTER, null, msgs);
            // Attach the new outer reference.
            if (newOuter != null)
                msgs = ((InternalEObject) newOuter).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - TypesPackage.JVM_INNER_TYPE_REFERENCE__OUTER, null, msgs);
            msgs = basicSetOuter(newOuter, msgs);
            // Fire the accumulated notifications, if any.
            if (msgs != null) msgs.dispatch();
        } else if (eNotificationRequired())
            // Value unchanged: still emit a SET notification for listeners.
            eNotify(new ENotificationImpl(this, Notification.SET, TypesPackage.JVM_INNER_TYPE_REFERENCE__OUTER, newOuter, newOuter));
    }
}
public class BuryingLogic {
    /**
     * Implements the visitor: resets the opcode stack and the per-class tracking
     * collections before analysis, and clears them afterwards so no state leaks
     * between classes.
     *
     * @param classContext the currently parsed java class
     */
    @Override
    public void visitClassContext(ClassContext classContext) {
        try {
            stack = new OpcodeStack();
            ifBlocks = new IfBlocks();
            gotoBranchPCs = new BitSet();
            casePositions = new BitSet();
            super.visitClassContext(classContext);
        } finally {
            // Release all per-class state; catchPCs is populated lazily during the visit,
            // which is why it is only cleared here and not initialized above.
            stack = null;
            ifBlocks = null;
            catchPCs = null;
            gotoBranchPCs = null;
            casePositions = null;
        }
    }
}
public class MtasToken { /** * Adds the positions .
* @ param positions the positions */
final public void addPositions ( int [ ] positions ) { } } | if ( positions != null && positions . length > 0 ) { if ( tokenPosition == null ) { tokenPosition = new MtasPosition ( positions ) ; } else { tokenPosition . add ( positions ) ; } } |
public class FedoraTypesUtils { /** * Given an internal reference node property , get the original name
* @ param refPropertyName the reference node property name
* @ return original property name of the reference property */
public static String getReferencePropertyOriginalName ( final String refPropertyName ) { } } | final int i = refPropertyName . lastIndexOf ( REFERENCE_PROPERTY_SUFFIX ) ; return i < 0 ? refPropertyName : refPropertyName . substring ( 0 , i ) ; |
public class PathBuilder { /** * Create a new Simple path
* @ param < A >
* @ param path existing path
* @ return property path */
@ SuppressWarnings ( "unchecked" ) public < A > SimplePath < A > get ( Path < A > path ) { } } | SimplePath < A > newPath = getSimple ( toString ( path ) , ( Class < A > ) path . getType ( ) ) ; return addMetadataOf ( newPath , path ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.